import joblib
import plotly
import optuna
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.utils.data import random_split
import torchvision
import torchvision.transforms as transforms
from sklearn.decomposition import PCA
from sklearn.model_selection import train_test_split
from sklearn.metrics import r2_score, mean_squared_error
from sklearn.preprocessing import StandardScaler, MinMaxScaler
from sklearn.pipeline import Pipeline
from sklearn.model_selection import KFold, StratifiedKFold
from tensorflow.keras import layers
from tensorflow.keras.metrics import RootMeanSquaredError
from tensorflow.keras.models import Sequential
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.utils import plot_model
from pprint import pprint
from patsy import dmatrices
import pandas as pd
import numpy as np
import os
import time
import math
import pickle
import random
from ann_visualizer.visualize import ann_viz
from matplotlib import pyplot as plt
import matplotlib.style as style
# Use the FiveThirtyEight plot theme for all matplotlib figures in this notebook.
style.use('fivethirtyeight')
# Jupyter magic: render matplotlib figures inline in the notebook output.
%matplotlib inline
def MAPE_pytorch(y_true, pred):
    """Mean Absolute Percentage Error (in %) between two torch tensors.

    NOTE(review): divides by y_true element-wise, so any zero target
    yields inf/nan — confirm targets are strictly positive.
    """
    relative_error = (y_true - pred) / y_true
    return relative_error.abs().mean() * 100
def RMSE_pytorch(y_true, pred):
    """Root Mean Squared Error between prediction and target tensors."""
    mse = F.mse_loss(pred, y_true)
    return mse.sqrt()
def R2_pytorch(y_true, pred):
    """Coefficient of determination (R^2) for torch tensors.

    1 - SS_res / SS_tot, where SS_tot is taken about the target mean.
    """
    residual_ss = ((y_true - pred) ** 2).sum()
    total_ss = ((y_true - y_true.mean()) ** 2).sum()
    return 1 - residual_ss / total_ss
def rmse(targets, predictions, **kwargs):
    """Root Mean Squared Error between two array-likes (returns a float)."""
    targets = np.asarray(targets)
    predictions = np.asarray(predictions)
    return np.sqrt(np.mean((targets - predictions) ** 2))
def evaluate(model, test_features, test_labels):
    """Score a fitted regressor: print mean absolute error and a
    MAPE-derived "accuracy" (100 - MAPE), returning the accuracy."""
    preds = model.predict(test_features)
    abs_errors = abs(preds - test_labels)
    pct_error = 100 * np.mean(abs_errors / test_labels)
    accuracy = 100 - pct_error
    print('Average Error: {:0.4f} degrees.'.format(np.mean(abs_errors)))
    print('Accuracy = {:0.2f}%.'.format(accuracy))
    return accuracy
def mean_absolute_percentage_error(y_true, y_pred, **kwargs):
    """Mean Absolute Percentage Error (in %) for numpy array-likes."""
    # src: https://stats.stackexchange.com/questions/58391/mean-absolute-percentage-error-mape-in-scikit-learn
    y_true = np.asarray(y_true)
    y_pred = np.asarray(y_pred)
    return 100 * np.mean(np.abs((y_true - y_pred) / y_true))
# Load the daily data for two clusters across three years (2017-2019).
d191 = pd.read_csv("data/cluster2_daily_2019.csv")
d192 = pd.read_csv("data/cluster3_daily_2019.csv")
d181 = pd.read_csv("data/cluster2_daily_2018.csv")
d182 = pd.read_csv("data/cluster3_daily_2018.csv")
d171 = pd.read_csv("data/cluster2_daily_2017.csv")
d172 = pd.read_csv("data/cluster3_daily_2017.csv")
# BUG FIX: the original printed "d191.shape" and {d191.shape} twice;
# the second label/value pair now correctly reports d192.
print(f"d191.shape: {d191.shape}, d192.shape: {d192.shape}\nd181.shape: {d181.shape}, d182.shape: {d182.shape}\nd171.shape: {d171.shape}, d172.shape: {d172.shape}")
d191.shape: (1483, 57), d191.shape: (1483, 57) d181.shape: (1299, 57), d182.shape: (579, 57) d171.shape: (1332, 57), d172.shape: (1079, 57)
# Combine all three years of data together (row-wise concatenation).
all_data = pd.concat([d191, d192, d181, d182, d171, d172], axis=0)
# Identifier/bookkeeping columns (and Total_ADTVolume) are excluded from modeling.
cols_to_drop = ["Date", "Unnamed: 0", "Vendor_Site_Id", "X", "i.X", "i.X.1", "X.1",
                "factor", "STATION", "newdate", "Total_ADTVolume", "year", "month", "day", "fit.cluster"]
data_dropped = all_data.drop(cols_to_drop, axis=1)
data_dropped.shape
(6746, 46)
# Replace all NAN value with Zero
# NOTE(review): zero-imputation is applied to every column uniformly — confirm
# that 0 is a sensible default for all features (e.g. income, temperature).
data_filled = data_dropped.fillna(0)
data_filled.describe()
| OD_line | Destination | Origin | Bike.lane..ft. | Buffered.bike.lane..ft. | Enhanced.shared.roadway..ft. | Protected.bike.lane..ft. | Neighbor.green.way..ft. | Off.street.path.trail..ft. | Primary.Arterial..ft. | ... | pct_of_African.American.population | pct_of_White.population | Meadian.Household.Income.000.. | Education | Avg.Temp | Avg.Humidity | PreciP | Weekend | DailyVolume | strava_volume | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| count | 6746.000000 | 6746.000000 | 6746.000000 | 6746.000000 | 6746.000000 | 6746.000000 | 6746.000000 | 6746.000000 | 6746.000000 | 6746.000000 | ... | 6746.000000 | 6746.000000 | 6746.000000 | 6746.000000 | 6746.000000 | 6746.000000 | 6746.000000 | 6746.000000 | 6746.000000 | 6746.000000 |
| mean | 24.565965 | 4.370738 | 4.370145 | 1174.402462 | 633.900902 | 5.865052 | 620.725648 | 307.992118 | 9626.805171 | 1665.836941 | ... | 5.479986 | 79.249231 | 65.773941 | 70.687779 | 55.419609 | 73.843940 | 0.031055 | 0.288615 | 1066.441891 | 67.318411 |
| std | 42.551007 | 7.792964 | 7.318313 | 1534.973316 | 791.001101 | 66.246042 | 854.051183 | 756.943964 | 9714.966405 | 1999.010159 | ... | 3.307419 | 3.609301 | 110.540980 | 8.024137 | 11.520078 | 13.766100 | 0.120453 | 0.453152 | 704.181434 | 53.252605 |
| min | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | ... | 0.000000 | 61.926000 | 9.720000 | 43.990087 | 21.200000 | 16.000000 | 0.000000 | 0.000000 | 10.000000 | 0.000000 |
| 25% | 3.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 2132.568621 | 0.000000 | ... | 2.653333 | 76.234000 | 44.972000 | 66.723930 | 46.296296 | 63.777778 | 0.000000 | 0.000000 | 483.250000 | 25.000000 |
| 50% | 11.000000 | 0.000000 | 1.000000 | 0.000000 | 0.000000 | 0.000000 | 327.275382 | 0.000000 | 5099.161603 | 1297.549800 | ... | 6.310000 | 78.577000 | 45.337000 | 68.890280 | 54.391176 | 74.000000 | 0.000000 | 0.000000 | 954.000000 | 55.000000 |
| 75% | 28.000000 | 6.000000 | 6.000000 | 1926.701848 | 950.154336 | 0.000000 | 675.665306 | 0.000000 | 14759.064020 | 2132.796300 | ... | 8.360000 | 82.245000 | 61.263330 | 74.633750 | 64.833333 | 85.380952 | 0.006304 | 1.000000 | 1508.750000 | 95.000000 |
| max | 916.000000 | 82.000000 | 80.000000 | 4893.294830 | 3463.680000 | 1219.680000 | 2813.512562 | 13273.920000 | 42392.719290 | 9149.269442 | ... | 26.474000 | 96.028000 | 2902.380000 | 85.472000 | 89.000000 | 100.000000 | 2.290000 | 1.000000 | 3136.000000 | 315.000000 |
8 rows × 46 columns
# Separate the feature matrix (X) from the regression target (DailyVolume).
Y_data = data_filled["DailyVolume"]
X_data = data_filled.drop("DailyVolume", axis=1)
print(f"X_data.shape: {X_data.shape} Y_data.shape: {Y_data.shape}")
X_data.shape: (6746, 45) Y_data.shape: (6746,)
# Convert the frames to plain numpy arrays for modeling.
X = np.asarray(X_data)
Y = np.asarray(Y_data)
# fit_transform already returns an ndarray, so the extra np.asarray wrapper was redundant.
# NOTE(review): the scaler is fit on the FULL dataset before the train/test split,
# which leaks test-set statistics into training — fit on the training split only.
X_scaled = StandardScaler().fit_transform(X)
print(f"X.shape: {X.shape} Y.shape: {Y.shape}")
print(f"X_scaled.shape: {X_scaled.shape}")
X.shape: (6746, 45) Y.shape: (6746,) X_scaled.shape: (6746, 45)
# X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.2, random_state=42)
# Hold out 20% of the rows for testing; fixed random_state keeps the split reproducible.
X_scaled_train, X_scaled_test, Y_train, Y_test = train_test_split(X_scaled, Y, test_size=0.2, random_state=42)
# print(f"X_train.shape: {X_train.shape} X_test.shape: {X_test.shape}")
print(f"Y_train.shape: {Y_train.shape} Y_test.shape: {Y_test.shape}")
print(f"X_scaled_train.shape: {X_scaled_train.shape} X_scaled_test.shape: {X_scaled_test.shape}")
Y_train.shape: (5396,) Y_test.shape: (1350,) X_scaled_train.shape: (5396, 45) X_scaled_test.shape: (1350, 45)
# convert np arrays to tensor, with float.
# X_train = torch.from_numpy(X_train).float()
# X_test = torch.from_numpy(X_test).float()
X_scaled_train = torch.from_numpy(X_scaled_train).float()
X_scaled_test = torch.from_numpy(X_scaled_test).float()
# Targets become column vectors. GENERALIZED: -1 lets reshape infer the row
# count instead of hard-coding the split sizes (5396 / 1350), so this cell
# keeps working if the dataset or test_size changes.
Y_train = torch.reshape(torch.from_numpy(Y_train).float(), (-1, 1))
Y_test = torch.reshape(torch.from_numpy(Y_test).float(), (-1, 1))
# print(f"X_train.shape: {X_train.shape} X_test.shape: {X_test.shape}")
print(f"Y_train.shape: {Y_train.shape} Y_test.shape: {Y_test.shape}")
print(f"X_scaled_train.shape: {X_scaled_train.shape} X_scaled_test.shape: {X_scaled_test.shape}")
Y_train.shape: torch.Size([5396, 1]) Y_test.shape: torch.Size([1350, 1]) X_scaled_train.shape: torch.Size([5396, 45]) X_scaled_test.shape: torch.Size([1350, 45])
# Name under which the Optuna study is created (1 conv layer, 45 scaled features).
study_name = "cnn_45_scaled_one_conv_lyr"
# Seed every RNG in play (torch CPU, torch CUDA, numpy, python) for reproducibility.
torch.manual_seed(42)
torch.cuda.manual_seed(42)
np.random.seed(42)
random.seed(42)
# Number of training epochs per Optuna trial.
n_epochs = 100
def objective_fn(trial):
    """Optuna objective: build, train, and evaluate a 1D-CNN regressor.

    The architecture is Unflatten -> Conv1d -> (ReLU?) -> Dropout ->
    MaxPool1d -> Flatten -> n fully-connected layers -> Linear(out, 1),
    trained with MAE (L1) loss on the module-level train/test tensors.

    Returns the test-set MAE (float) for Optuna to minimize.
    Raises optuna.TrialPruned when the pruner decides to stop early.
    """
    # set up GPU if available.
    device = "cpu"
    if torch.cuda.is_available():
        device = "cuda:0"
    device = torch.device(device)
    # get some parameters using optuna:
    batch_size = trial.suggest_categorical("batch_size", [16, 32, 64, 128])
    n_layers = trial.suggest_int("n_hdn_layers", 2, 5)
    max_nrns = trial.suggest_int("neurons_HL1", 2, 1024, step=2)
    # now for the 1D convolution parameters
    out_channel = trial.suggest_categorical("out_channel", [32, 64, 128])
    kernel_size = trial.suggest_categorical("kernel_size", [3, 4, 5])
    conv_activation = trial.suggest_categorical("conv_activation", ['relu', 'linear'])
    # now for the dropout and max pool 1D parameters
    dropout_prob = trial.suggest_categorical("dropout_prob", [0.1, 0.2, 0.25, 0.5])
    mx_pl_size = trial.suggest_categorical("mx_pl_size", [2, 3, 4, 5])
    mx_pl_strides = trial.suggest_categorical("mx_pl_strides", [1, 2, 3])
    # FIX: renamed the local list from `layers` to `modules` — `layers` shadowed
    # the module-level `from tensorflow.keras import layers` import.
    modules = []
    # (batch, 45) -> (batch, 1, 45) so Conv1d sees a single input channel.
    modules.append(torch.nn.Unflatten(dim=1, unflattened_size=(1, 45)))
    modules.append(torch.nn.Conv1d(1, out_channel, kernel_size))
    if conv_activation == 'relu':
        modules.append(torch.nn.ReLU())
    modules.append(torch.nn.Dropout(dropout_prob))
    modules.append(torch.nn.MaxPool1d(mx_pl_size, stride=mx_pl_strides))
    modules.append(torch.nn.Flatten())
    # now we calculate the output size of MaxPool1D
    # (conv output length is 45 - kernel_size + 1; then the standard pool formula)
    in_features = math.floor(((math.floor(45 - kernel_size + 1) - mx_pl_size) / mx_pl_strides) + 1)
    in_features = in_features * out_channel
    out_features = 0
    for i in range(n_layers):
        out_features = int(max_nrns)
        modules.append(torch.nn.Linear(in_features, out_features))
        activation = trial.suggest_categorical(f"HL{i}_ac_fn", ["relu", "linear"])
        if activation == "relu":
            modules.append(torch.nn.ReLU())
        in_features = out_features
        # halve the width for the next layer, but never go below 2
        # (prevents the last layer being Linear(0, 1))
        if max_nrns > 2:
            max_nrns = max_nrns / 2
    modules.append(torch.nn.Linear(out_features, 1))
    cnn_model = torch.nn.Sequential(*modules).to(device)
    # use MAE as loss function (called L1Loss).
    loss_fn = nn.L1Loss()
    optimizer = optim.Adam(cnn_model.parameters())
    for epoch in range(n_epochs):
        # now we train the model:
        cnn_model.train()
        # random permutation over the training rows -> shuffled mini-batches
        permutation = torch.randperm(X_scaled_train.size()[0])
        for i in range(0, X_scaled_train.size()[0], batch_size):
            indices = permutation[i:i + batch_size]
            X_train_batch, Y_train_batch = X_scaled_train[indices], Y_train[indices]
            train_prediction = cnn_model(X_train_batch.to(device))
            train_loss = loss_fn(train_prediction, Y_train_batch.to(device))
            optimizer.zero_grad()
            # backpropagation
            train_loss.backward()
            optimizer.step()
        # now we test the model:
        cnn_model.eval()
        # BUG FIX: evaluation previously ran with gradient tracking enabled,
        # building a throwaway autograd graph every epoch; no_grad avoids that.
        with torch.no_grad():
            test_prediction = cnn_model(X_scaled_test.to(device))
            test_loss = loss_fn(test_prediction, Y_test.to(device))
        # FIX: report/return a plain float — Optuna expects a number, not a tensor.
        trial.report(test_loss.item(), step=epoch)
        if trial.should_prune():
            raise optuna.TrialPruned()
    # we return the value Optuna minimizes (test-set MAE)
    return test_loss.item()
%%time
# TPE sampler with a fixed seed makes the hyperparameter search reproducible;
# direction='minimize' because objective_fn returns the test-set MAE.
study = optuna.create_study(sampler=optuna.samplers.TPESampler(seed=42), study_name=study_name, direction='minimize')
study.optimize(objective_fn, n_trials=5000)
[I 2021-12-17 11:36:59,414] A new study created in memory with name: cnn_45_scaled_one_conv_lyr [I 2021-12-17 11:37:39,233] Trial 0 finished with value: 378.0890197753906 and parameters: {'batch_size': 32, 'n_hdn_layers': 2, 'neurons_HL1': 160, 'out_channel': 64, 'kernel_size': 5, 'conv_activation': 'relu', 'dropout_prob': 0.5, 'mx_pl_size': 4, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 0 with value: 378.0890197753906. [I 2021-12-17 11:39:33,704] Trial 1 finished with value: 236.3225860595703 and parameters: {'batch_size': 32, 'n_hdn_layers': 5, 'neurons_HL1': 990, 'out_channel': 32, 'kernel_size': 3, 'conv_activation': 'relu', 'dropout_prob': 0.1, 'mx_pl_size': 5, 'mx_pl_strides': 2, 'HL0_ac_fn': 'linear', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'relu', 'HL4_ac_fn': 'relu'}. Best is trial 1 with value: 236.3225860595703. [I 2021-12-17 11:40:05,582] Trial 2 finished with value: 334.6907958984375 and parameters: {'batch_size': 128, 'n_hdn_layers': 2, 'neurons_HL1': 1012, 'out_channel': 32, 'kernel_size': 3, 'conv_activation': 'relu', 'dropout_prob': 0.25, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 1 with value: 236.3225860595703. [I 2021-12-17 11:40:53,602] Trial 3 finished with value: 183.64532470703125 and parameters: {'batch_size': 32, 'n_hdn_layers': 3, 'neurons_HL1': 28, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 3, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu'}. Best is trial 3 with value: 183.64532470703125. [I 2021-12-17 11:41:21,161] Trial 4 finished with value: 193.88101196289062 and parameters: {'batch_size': 128, 'n_hdn_layers': 2, 'neurons_HL1': 524, 'out_channel': 32, 'kernel_size': 4, 'conv_activation': 'linear', 'dropout_prob': 0.2, 'mx_pl_size': 2, 'mx_pl_strides': 1, 'HL0_ac_fn': 'linear', 'HL1_ac_fn': 'relu'}. 
Best is trial 3 with value: 183.64532470703125. [I 2021-12-17 11:41:21,645] Trial 5 pruned. [I 2021-12-17 11:41:22,256] Trial 6 pruned. [I 2021-12-17 11:41:23,104] Trial 7 pruned. [I 2021-12-17 11:41:23,447] Trial 8 pruned. [I 2021-12-17 11:41:32,341] Trial 9 pruned. [I 2021-12-17 11:46:10,051] Trial 10 finished with value: 172.80258178710938 and parameters: {'batch_size': 16, 'n_hdn_layers': 3, 'neurons_HL1': 332, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 3, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 10 with value: 172.80258178710938. [I 2021-12-17 11:50:55,937] Trial 11 finished with value: 189.69667053222656 and parameters: {'batch_size': 16, 'n_hdn_layers': 3, 'neurons_HL1': 352, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 3, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 10 with value: 172.80258178710938. [I 2021-12-17 11:50:56,684] Trial 12 pruned. [I 2021-12-17 11:55:43,882] Trial 13 finished with value: 172.41595458984375 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 346, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 3, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 13 with value: 172.41595458984375. [I 2021-12-17 12:00:48,165] Trial 14 finished with value: 193.06922912597656 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 382, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 4, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 13 with value: 172.41595458984375. [I 2021-12-17 12:05:34,097] Trial 15 pruned. 
[I 2021-12-17 12:10:16,661] Trial 16 finished with value: 176.14666748046875 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 332, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 3, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 13 with value: 172.41595458984375. [I 2021-12-17 12:18:19,951] Trial 17 finished with value: 188.04049682617188 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 642, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 3, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 13 with value: 172.41595458984375. [I 2021-12-17 12:18:26,105] Trial 18 pruned. [I 2021-12-17 12:18:27,293] Trial 19 pruned. [I 2021-12-17 12:18:28,740] Trial 20 pruned. [I 2021-12-17 12:22:47,673] Trial 21 finished with value: 180.59963989257812 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 296, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 3, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 13 with value: 172.41595458984375. [I 2021-12-17 12:28:40,612] Trial 22 finished with value: 190.87045288085938 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 446, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 3, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 13 with value: 172.41595458984375. [I 2021-12-17 12:28:48,158] Trial 23 pruned. [I 2021-12-17 12:28:55,230] Trial 24 pruned. [I 2021-12-17 12:29:55,415] Trial 25 pruned. 
[I 2021-12-17 12:35:37,708] Trial 26 finished with value: 182.62437438964844 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 434, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 3, 'mx_pl_strides': 1, 'HL0_ac_fn': 'linear', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear'}. Best is trial 13 with value: 172.41595458984375. [I 2021-12-17 12:35:38,989] Trial 27 pruned. [I 2021-12-17 12:35:40,552] Trial 28 pruned. [I 2021-12-17 12:35:40,981] Trial 29 pruned. [I 2021-12-17 12:35:41,613] Trial 30 pruned. [I 2021-12-17 12:35:50,247] Trial 31 pruned. [I 2021-12-17 12:35:59,897] Trial 32 pruned. [I 2021-12-17 12:36:02,634] Trial 33 pruned. [I 2021-12-17 12:36:03,767] Trial 34 pruned. [I 2021-12-17 12:36:05,149] Trial 35 pruned. [I 2021-12-17 12:36:05,563] Trial 36 pruned. [I 2021-12-17 12:36:06,042] Trial 37 pruned. [I 2021-12-17 12:36:06,424] Trial 38 pruned. [I 2021-12-17 12:36:09,028] Trial 39 pruned. [I 2021-12-17 12:36:12,918] Trial 40 pruned. [I 2021-12-17 12:36:25,699] Trial 41 pruned. [I 2021-12-17 12:36:41,007] Trial 42 pruned. [I 2021-12-17 12:36:51,503] Trial 43 pruned. [I 2021-12-17 12:36:52,196] Trial 44 pruned. [I 2021-12-17 12:36:55,331] Trial 45 pruned. [I 2021-12-17 12:36:55,720] Trial 46 pruned. [I 2021-12-17 12:37:07,972] Trial 47 pruned. [I 2021-12-17 12:37:08,709] Trial 48 pruned. [I 2021-12-17 12:37:10,980] Trial 49 pruned. [I 2021-12-17 12:37:12,437] Trial 50 pruned. [I 2021-12-17 12:37:12,894] Trial 51 pruned. [I 2021-12-17 12:37:13,492] Trial 52 pruned. [I 2021-12-17 12:37:14,401] Trial 53 pruned. [I 2021-12-17 12:37:15,479] Trial 54 pruned. [I 2021-12-17 12:37:23,065] Trial 55 pruned. [I 2021-12-17 12:37:29,871] Trial 56 pruned. [I 2021-12-17 12:37:32,677] Trial 57 pruned. [I 2021-12-17 12:37:33,580] Trial 58 pruned. [I 2021-12-17 12:37:42,065] Trial 59 pruned. [I 2021-12-17 12:37:43,104] Trial 60 pruned. [I 2021-12-17 12:38:02,979] Trial 61 pruned. 
[I 2021-12-17 12:38:21,690] Trial 62 pruned. [I 2021-12-17 12:38:31,015] Trial 63 pruned. [I 2021-12-17 12:38:53,155] Trial 64 pruned. [I 2021-12-17 12:38:54,148] Trial 65 pruned. [I 2021-12-17 12:38:54,908] Trial 66 pruned. [I 2021-12-17 12:38:56,529] Trial 67 pruned. [I 2021-12-17 12:39:00,303] Trial 68 pruned. [I 2021-12-17 12:39:01,871] Trial 69 pruned. [I 2021-12-17 12:39:09,857] Trial 70 pruned. [I 2021-12-17 12:39:21,296] Trial 71 pruned. [I 2021-12-17 12:39:32,256] Trial 72 pruned. [I 2021-12-17 12:45:09,355] Trial 73 finished with value: 193.0162811279297 and parameters: {'batch_size': 16, 'n_hdn_layers': 2, 'neurons_HL1': 438, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 3, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu'}. Best is trial 13 with value: 172.41595458984375. [I 2021-12-17 12:45:11,481] Trial 74 pruned. [I 2021-12-17 12:45:11,848] Trial 75 pruned. [I 2021-12-17 12:45:18,362] Trial 76 pruned. [I 2021-12-17 12:45:26,697] Trial 77 pruned. [I 2021-12-17 12:45:27,724] Trial 78 pruned. [I 2021-12-17 12:45:43,006] Trial 79 pruned. [I 2021-12-17 12:45:43,482] Trial 80 pruned. [I 2021-12-17 12:45:46,883] Trial 81 pruned. [I 2021-12-17 12:45:51,434] Trial 82 pruned. [I 2021-12-17 12:46:02,259] Trial 83 pruned. [I 2021-12-17 12:46:18,072] Trial 84 pruned. [I 2021-12-17 12:46:19,755] Trial 85 pruned. [I 2021-12-17 12:46:25,660] Trial 86 pruned. [I 2021-12-17 12:46:26,661] Trial 87 pruned. [I 2021-12-17 12:46:28,577] Trial 88 pruned. [I 2021-12-17 12:46:29,205] Trial 89 pruned. [I 2021-12-17 12:46:39,760] Trial 90 pruned. [I 2021-12-17 12:47:02,721] Trial 91 pruned. 
[I 2021-12-17 12:51:12,819] Trial 92 finished with value: 212.79750061035156 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 282, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 3, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 13 with value: 172.41595458984375. [I 2021-12-17 12:51:26,023] Trial 93 pruned. [I 2021-12-17 12:51:29,425] Trial 94 pruned. [I 2021-12-17 12:51:36,418] Trial 95 pruned. [I 2021-12-17 12:54:46,682] Trial 96 finished with value: 175.14276123046875 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 374, 'out_channel': 64, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 13 with value: 172.41595458984375. [I 2021-12-17 12:54:47,700] Trial 97 pruned. [I 2021-12-17 12:54:49,183] Trial 98 pruned. [I 2021-12-17 12:54:52,927] Trial 99 pruned. [I 2021-12-17 12:54:53,652] Trial 100 pruned. [I 2021-12-17 12:54:55,523] Trial 101 pruned. [I 2021-12-17 12:55:03,806] Trial 102 pruned. [I 2021-12-17 12:55:09,304] Trial 103 pruned. [I 2021-12-17 12:55:16,305] Trial 104 pruned. [I 2021-12-17 12:55:23,870] Trial 105 pruned. [I 2021-12-17 12:55:26,893] Trial 106 pruned. [I 2021-12-17 12:55:27,374] Trial 107 pruned. [I 2021-12-17 12:55:29,016] Trial 108 pruned. [I 2021-12-17 12:55:30,131] Trial 109 pruned. [I 2021-12-17 12:55:30,762] Trial 110 pruned. [I 2021-12-17 12:55:33,592] Trial 111 pruned. [I 2021-12-17 12:55:36,579] Trial 112 pruned. [I 2021-12-17 12:55:40,148] Trial 113 pruned. [I 2021-12-17 12:55:43,530] Trial 114 pruned. [I 2021-12-17 12:55:57,533] Trial 115 pruned. [I 2021-12-17 12:56:07,078] Trial 116 pruned. [I 2021-12-17 12:56:08,164] Trial 117 pruned. [I 2021-12-17 12:56:10,600] Trial 118 pruned. 
[I 2021-12-17 12:56:11,608] Trial 119 pruned. [I 2021-12-17 12:56:14,258] Trial 120 pruned. [I 2021-12-17 12:56:14,612] Trial 121 pruned. [I 2021-12-17 12:56:14,915] Trial 122 pruned. [I 2021-12-17 12:56:15,215] Trial 123 pruned. [I 2021-12-17 12:56:15,385] Trial 124 pruned. [I 2021-12-17 12:56:19,060] Trial 125 pruned. [I 2021-12-17 12:56:23,352] Trial 126 pruned. [I 2021-12-17 12:56:24,248] Trial 127 pruned. [I 2021-12-17 12:56:24,971] Trial 128 pruned. [I 2021-12-17 12:56:32,416] Trial 129 pruned. [I 2021-12-17 12:56:33,119] Trial 130 pruned. [I 2021-12-17 12:56:40,538] Trial 131 pruned. [I 2021-12-17 12:56:51,062] Trial 132 pruned. [I 2021-12-17 12:56:58,130] Trial 133 pruned. [I 2021-12-17 12:57:00,458] Trial 134 pruned. [I 2021-12-17 13:02:32,977] Trial 135 finished with value: 174.30931091308594 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 414, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 3, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 13 with value: 172.41595458984375. [I 2021-12-17 13:02:36,298] Trial 136 pruned. [I 2021-12-17 13:02:39,266] Trial 137 pruned. [I 2021-12-17 13:02:53,136] Trial 138 pruned. [I 2021-12-17 13:02:59,008] Trial 139 pruned. [I 2021-12-17 13:03:14,066] Trial 140 pruned. [I 2021-12-17 13:03:19,129] Trial 141 pruned. [I 2021-12-17 13:03:24,822] Trial 142 pruned. [I 2021-12-17 13:03:28,174] Trial 143 pruned. [I 2021-12-17 13:03:30,056] Trial 144 pruned. [I 2021-12-17 13:03:30,914] Trial 145 pruned. [I 2021-12-17 13:03:36,954] Trial 146 pruned. [I 2021-12-17 13:03:38,853] Trial 147 pruned. [I 2021-12-17 13:03:48,536] Trial 148 pruned. [I 2021-12-17 13:03:50,011] Trial 149 pruned. [I 2021-12-17 13:03:51,362] Trial 150 pruned. [I 2021-12-17 13:03:52,509] Trial 151 pruned. [I 2021-12-17 13:03:53,430] Trial 152 pruned. [I 2021-12-17 13:03:54,481] Trial 153 pruned. 
[I 2021-12-17 13:03:54,999] Trial 154 pruned. [I 2021-12-17 13:03:55,242] Trial 155 pruned. [I 2021-12-17 13:03:56,668] Trial 156 pruned. [I 2021-12-17 13:04:07,680] Trial 157 pruned. [I 2021-12-17 13:04:09,500] Trial 158 pruned. [I 2021-12-17 13:04:17,205] Trial 159 pruned. [I 2021-12-17 13:04:18,494] Trial 160 pruned. [I 2021-12-17 13:04:18,838] Trial 161 pruned. [I 2021-12-17 13:04:19,168] Trial 162 pruned. [I 2021-12-17 13:04:19,533] Trial 163 pruned. [I 2021-12-17 13:04:19,858] Trial 164 pruned. [I 2021-12-17 13:04:21,281] Trial 165 pruned. [I 2021-12-17 13:04:22,048] Trial 166 pruned. [I 2021-12-17 13:04:41,917] Trial 167 pruned. [I 2021-12-17 13:04:43,320] Trial 168 pruned. [I 2021-12-17 13:04:43,741] Trial 169 pruned. [I 2021-12-17 13:04:51,835] Trial 170 pruned. [I 2021-12-17 13:04:52,218] Trial 171 pruned. [I 2021-12-17 13:04:52,702] Trial 172 pruned. [I 2021-12-17 13:04:53,124] Trial 173 pruned. [I 2021-12-17 13:04:53,832] Trial 174 pruned. [I 2021-12-17 13:04:54,206] Trial 175 pruned. [I 2021-12-17 13:04:56,612] Trial 176 pruned. [I 2021-12-17 13:04:58,757] Trial 177 pruned. [I 2021-12-17 13:05:05,247] Trial 178 pruned. [I 2021-12-17 13:05:05,650] Trial 179 pruned. [I 2021-12-17 13:05:06,996] Trial 180 pruned. [I 2021-12-17 13:05:19,434] Trial 181 pruned. [I 2021-12-17 13:05:23,389] Trial 182 pruned. [I 2021-12-17 13:05:33,822] Trial 183 pruned. [I 2021-12-17 13:05:59,929] Trial 184 pruned. [I 2021-12-17 13:06:05,624] Trial 185 pruned. [I 2021-12-17 13:06:06,330] Trial 186 pruned. [I 2021-12-17 13:06:15,475] Trial 187 pruned. [I 2021-12-17 13:06:19,280] Trial 188 pruned. [I 2021-12-17 13:06:25,677] Trial 189 pruned. [I 2021-12-17 13:06:27,065] Trial 190 pruned. [I 2021-12-17 13:06:27,893] Trial 191 pruned. [I 2021-12-17 13:06:39,286] Trial 192 pruned. [I 2021-12-17 13:06:51,773] Trial 193 pruned. [I 2021-12-17 13:06:53,901] Trial 194 pruned. [I 2021-12-17 13:06:56,478] Trial 195 pruned. [I 2021-12-17 13:06:56,757] Trial 196 pruned. 
[I 2021-12-17 13:07:07,560] Trial 197 pruned. [I 2021-12-17 13:07:09,414] Trial 198 pruned. [I 2021-12-17 13:07:09,906] Trial 199 pruned. [I 2021-12-17 13:07:10,837] Trial 200 pruned. [I 2021-12-17 13:07:11,893] Trial 201 pruned. [I 2021-12-17 13:07:13,046] Trial 202 pruned. [I 2021-12-17 13:07:14,277] Trial 203 pruned. [I 2021-12-17 13:07:15,065] Trial 204 pruned. [I 2021-12-17 13:07:22,123] Trial 205 pruned. [I 2021-12-17 13:07:23,804] Trial 206 pruned. [I 2021-12-17 13:07:24,372] Trial 207 pruned. [I 2021-12-17 13:07:33,776] Trial 208 pruned. [I 2021-12-17 13:07:57,220] Trial 209 pruned. [I 2021-12-17 13:07:58,935] Trial 210 pruned. [I 2021-12-17 13:08:03,199] Trial 211 pruned. [I 2021-12-17 13:08:16,265] Trial 212 pruned. [I 2021-12-17 13:08:20,230] Trial 213 pruned. [I 2021-12-17 13:09:04,741] Trial 214 pruned. [I 2021-12-17 13:09:14,984] Trial 215 pruned. [I 2021-12-17 13:09:15,437] Trial 216 pruned. [I 2021-12-17 13:09:26,615] Trial 217 pruned. [I 2021-12-17 13:09:28,479] Trial 218 pruned. [I 2021-12-17 13:09:29,060] Trial 219 pruned. [I 2021-12-17 13:09:40,717] Trial 220 pruned. [I 2021-12-17 13:09:56,915] Trial 221 pruned. [I 2021-12-17 13:10:01,181] Trial 222 pruned. [I 2021-12-17 13:10:11,116] Trial 223 pruned. [I 2021-12-17 13:10:28,915] Trial 224 pruned. [I 2021-12-17 13:10:32,966] Trial 225 pruned. [I 2021-12-17 13:10:34,067] Trial 226 pruned. [I 2021-12-17 13:10:49,585] Trial 227 pruned. [I 2021-12-17 13:10:51,051] Trial 228 pruned. [I 2021-12-17 13:10:52,327] Trial 229 pruned. [I 2021-12-17 13:10:55,549] Trial 230 pruned. [I 2021-12-17 13:11:08,899] Trial 231 pruned. [I 2021-12-17 13:11:10,372] Trial 232 pruned. [I 2021-12-17 13:11:24,147] Trial 233 pruned. [I 2021-12-17 13:11:26,746] Trial 234 pruned. [I 2021-12-17 13:11:28,836] Trial 235 pruned. [I 2021-12-17 13:11:35,638] Trial 236 pruned. [I 2021-12-17 13:11:35,956] Trial 237 pruned. [I 2021-12-17 13:11:44,950] Trial 238 pruned. [I 2021-12-17 13:11:45,938] Trial 239 pruned. 
[I 2021-12-17 13:11:53,360] Trial 240 pruned. [I 2021-12-17 13:12:07,030] Trial 241 pruned. [I 2021-12-17 13:12:20,603] Trial 242 pruned. [I 2021-12-17 13:17:33,428] Trial 243 finished with value: 183.65724182128906 and parameters: {'batch_size': 16, 'n_hdn_layers': 2, 'neurons_HL1': 400, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 3, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu'}. Best is trial 13 with value: 172.41595458984375. [I 2021-12-17 13:17:36,236] Trial 244 pruned. [I 2021-12-17 13:17:48,776] Trial 245 pruned. [I 2021-12-17 13:20:20,377] Trial 246 finished with value: 164.47718811035156 and parameters: {'batch_size': 16, 'n_hdn_layers': 3, 'neurons_HL1': 364, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 246 with value: 164.47718811035156. [I 2021-12-17 13:20:24,918] Trial 247 pruned. [I 2021-12-17 13:20:26,468] Trial 248 pruned. [I 2021-12-17 13:20:27,921] Trial 249 pruned. [I 2021-12-17 13:20:28,509] Trial 250 pruned. [I 2021-12-17 13:20:28,896] Trial 251 pruned. [I 2021-12-17 13:20:30,614] Trial 252 pruned. [I 2021-12-17 13:20:31,068] Trial 253 pruned. [I 2021-12-17 13:20:34,081] Trial 254 pruned. [I 2021-12-17 13:20:35,351] Trial 255 pruned. [I 2021-12-17 13:20:41,456] Trial 256 pruned. [I 2021-12-17 13:20:43,494] Trial 257 pruned. [I 2021-12-17 13:20:46,216] Trial 258 pruned. [I 2021-12-17 13:20:47,376] Trial 259 pruned. [I 2021-12-17 13:20:48,646] Trial 260 pruned. [I 2021-12-17 13:21:16,071] Trial 261 pruned. [I 2021-12-17 13:21:17,659] Trial 262 pruned. [I 2021-12-17 13:21:18,402] Trial 263 pruned. [I 2021-12-17 13:21:19,276] Trial 264 pruned. 
[I 2021-12-17 13:28:23,153] Trial 265 finished with value: 163.23626708984375 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 530, 'out_channel': 128, 'kernel_size': 4, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 265 with value: 163.23626708984375. [I 2021-12-17 13:28:24,068] Trial 266 pruned. [I 2021-12-17 13:28:33,961] Trial 267 pruned. [I 2021-12-17 13:28:43,633] Trial 268 pruned. [I 2021-12-17 13:28:44,726] Trial 269 pruned. [I 2021-12-17 13:28:45,284] Trial 270 pruned. [I 2021-12-17 13:28:51,187] Trial 271 pruned. [I 2021-12-17 13:28:52,801] Trial 272 pruned. [I 2021-12-17 13:33:33,469] Trial 273 finished with value: 170.61795043945312 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 328, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 1, 'HL0_ac_fn': 'linear', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 265 with value: 163.23626708984375. [I 2021-12-17 13:33:38,945] Trial 274 pruned. [I 2021-12-17 13:38:31,866] Trial 275 finished with value: 178.44134521484375 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 352, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 265 with value: 163.23626708984375. [I 2021-12-17 13:38:34,309] Trial 276 pruned. [I 2021-12-17 13:38:37,971] Trial 277 pruned. 
[I 2021-12-17 13:43:06,162] Trial 278 finished with value: 175.86550903320312 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 310, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 265 with value: 163.23626708984375. [I 2021-12-17 13:47:02,405] Trial 279 finished with value: 188.5216827392578 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 258, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 265 with value: 163.23626708984375. [I 2021-12-17 13:47:07,152] Trial 280 pruned. [I 2021-12-17 13:47:12,153] Trial 281 pruned. [I 2021-12-17 13:47:17,604] Trial 282 pruned. [I 2021-12-17 13:47:28,367] Trial 283 pruned. [I 2021-12-17 13:47:30,439] Trial 284 pruned. [I 2021-12-17 13:51:57,772] Trial 285 finished with value: 179.24380493164062 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 308, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 265 with value: 163.23626708984375. [I 2021-12-17 13:56:35,820] Trial 286 finished with value: 174.60755920410156 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 324, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 265 with value: 163.23626708984375. [I 2021-12-17 13:56:43,935] Trial 287 pruned. 
[I 2021-12-17 14:01:03,176] Trial 288 finished with value: 199.51914978027344 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 296, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 265 with value: 163.23626708984375. [I 2021-12-17 14:01:10,637] Trial 289 pruned. [I 2021-12-17 14:05:31,159] Trial 290 finished with value: 175.18344116210938 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 298, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 265 with value: 163.23626708984375. [I 2021-12-17 14:10:09,506] Trial 291 finished with value: 175.98753356933594 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 324, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 265 with value: 163.23626708984375. [I 2021-12-17 14:10:17,835] Trial 292 pruned. [I 2021-12-17 14:10:26,365] Trial 293 pruned. [I 2021-12-17 14:10:32,338] Trial 294 pruned. [I 2021-12-17 14:10:37,944] Trial 295 pruned. [I 2021-12-17 14:10:43,321] Trial 296 pruned. [I 2021-12-17 14:10:55,309] Trial 297 pruned. [I 2021-12-17 14:10:58,177] Trial 298 pruned. [I 2021-12-17 14:11:03,448] Trial 299 pruned. [I 2021-12-17 14:11:09,903] Trial 300 pruned. [I 2021-12-17 14:11:12,385] Trial 301 pruned. 
[I 2021-12-17 14:16:09,786] Trial 302 finished with value: 175.62860107421875 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 356, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 265 with value: 163.23626708984375. [I 2021-12-17 14:16:21,477] Trial 303 pruned. [I 2021-12-17 14:16:24,577] Trial 304 pruned. [I 2021-12-17 14:21:03,688] Trial 305 finished with value: 172.02037048339844 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 326, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 265 with value: 163.23626708984375. [I 2021-12-17 14:21:06,456] Trial 306 pruned. [I 2021-12-17 14:21:12,322] Trial 307 pruned. [I 2021-12-17 14:21:14,985] Trial 308 pruned. [I 2021-12-17 14:25:47,677] Trial 309 finished with value: 164.80902099609375 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 316, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 265 with value: 163.23626708984375. [I 2021-12-17 14:25:53,177] Trial 310 pruned. [I 2021-12-17 14:25:58,359] Trial 311 pruned. [I 2021-12-17 14:26:04,064] Trial 312 pruned. [I 2021-12-17 14:26:06,604] Trial 313 pruned. [I 2021-12-17 14:26:12,103] Trial 314 pruned. 
[I 2021-12-17 14:31:01,324] Trial 315 finished with value: 167.71829223632812 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 346, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 265 with value: 163.23626708984375. [I 2021-12-17 14:31:07,318] Trial 316 pruned. [I 2021-12-17 14:31:16,107] Trial 317 pruned. [I 2021-12-17 14:31:27,466] Trial 318 pruned. [I 2021-12-17 14:31:32,831] Trial 319 pruned. [I 2021-12-17 14:31:39,026] Trial 320 pruned. [I 2021-12-17 14:31:41,906] Trial 321 pruned. [I 2021-12-17 14:31:46,691] Trial 322 pruned. [I 2021-12-17 14:31:49,363] Trial 323 pruned. [I 2021-12-17 14:31:52,461] Trial 324 pruned. [I 2021-12-17 14:31:55,237] Trial 325 pruned. [I 2021-12-17 14:32:01,052] Trial 326 pruned. [I 2021-12-17 14:32:07,464] Trial 327 pruned. [I 2021-12-17 14:32:10,028] Trial 328 pruned. [I 2021-12-17 14:32:16,015] Trial 329 pruned. [I 2021-12-17 14:32:21,412] Trial 330 pruned. [I 2021-12-17 14:32:24,277] Trial 331 pruned. [I 2021-12-17 14:32:34,821] Trial 332 pruned. [I 2021-12-17 14:32:37,915] Trial 333 pruned. [I 2021-12-17 14:32:43,702] Trial 334 pruned. [I 2021-12-17 14:32:46,157] Trial 335 pruned. [I 2021-12-17 14:32:54,405] Trial 336 pruned. [I 2021-12-17 14:38:19,155] Trial 337 finished with value: 195.30001831054688 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 396, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 265 with value: 163.23626708984375. [I 2021-12-17 14:38:27,006] Trial 338 pruned. [I 2021-12-17 14:38:32,976] Trial 339 pruned. [I 2021-12-17 14:38:36,118] Trial 340 pruned. [I 2021-12-17 14:38:41,741] Trial 341 pruned. 
[I 2021-12-17 14:38:51,754] Trial 342 pruned. [I 2021-12-17 14:38:57,495] Trial 343 pruned. [I 2021-12-17 14:39:05,114] Trial 344 pruned. [I 2021-12-17 14:39:07,886] Trial 345 pruned. [I 2021-12-17 14:39:10,233] Trial 346 pruned. [I 2021-12-17 14:39:19,318] Trial 347 pruned. [I 2021-12-17 14:44:08,536] Trial 348 finished with value: 163.7893829345703 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 346, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 265 with value: 163.23626708984375. [I 2021-12-17 14:44:13,856] Trial 349 pruned. [I 2021-12-17 14:48:58,920] Trial 350 finished with value: 170.67913818359375 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 340, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 265 with value: 163.23626708984375. [I 2021-12-17 14:49:07,837] Trial 351 pruned. [I 2021-12-17 14:49:11,063] Trial 352 pruned. [I 2021-12-17 14:49:16,805] Trial 353 pruned. [I 2021-12-17 14:49:29,066] Trial 354 pruned. [I 2021-12-17 14:49:34,734] Trial 355 pruned. [I 2021-12-17 14:49:40,591] Trial 356 pruned. [I 2021-12-17 14:49:48,696] Trial 357 pruned. [I 2021-12-17 14:49:55,483] Trial 358 pruned. [I 2021-12-17 14:50:01,795] Trial 359 pruned. [I 2021-12-17 14:50:04,637] Trial 360 pruned. [I 2021-12-17 14:50:10,636] Trial 361 pruned. [I 2021-12-17 14:50:13,919] Trial 362 pruned. [I 2021-12-17 14:50:26,997] Trial 363 pruned. 
[I 2021-12-17 14:55:15,178] Trial 364 finished with value: 176.34854125976562 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 344, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 265 with value: 163.23626708984375. [I 2021-12-17 14:55:20,947] Trial 365 pruned. [I 2021-12-17 14:59:59,676] Trial 366 finished with value: 168.68338012695312 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 324, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 265 with value: 163.23626708984375. [I 2021-12-17 15:00:02,449] Trial 367 pruned. [I 2021-12-17 15:00:04,964] Trial 368 pruned. [I 2021-12-17 15:00:19,156] Trial 369 pruned. [I 2021-12-17 15:00:22,116] Trial 370 pruned. [I 2021-12-17 15:00:27,445] Trial 371 pruned. [I 2021-12-17 15:00:32,943] Trial 372 pruned. [I 2021-12-17 15:00:39,037] Trial 373 pruned. [I 2021-12-17 15:00:44,380] Trial 374 pruned. [I 2021-12-17 15:00:47,007] Trial 375 pruned. [I 2021-12-17 15:00:52,568] Trial 376 pruned. [I 2021-12-17 15:00:58,765] Trial 377 pruned. [I 2021-12-17 15:05:50,595] Trial 378 finished with value: 164.1510772705078 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 348, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 265 with value: 163.23626708984375. [I 2021-12-17 15:05:55,697] Trial 379 pruned. [I 2021-12-17 15:06:01,586] Trial 380 pruned. [I 2021-12-17 15:06:04,307] Trial 381 pruned. [I 2021-12-17 15:06:09,982] Trial 382 pruned. 
[I 2021-12-17 15:06:17,384] Trial 383 pruned. [I 2021-12-17 15:06:20,344] Trial 384 pruned. [I 2021-12-17 15:06:23,006] Trial 385 pruned. [I 2021-12-17 15:11:05,063] Trial 386 finished with value: 164.20889282226562 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 330, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 265 with value: 163.23626708984375. [I 2021-12-17 15:11:10,732] Trial 387 pruned. [I 2021-12-17 15:11:16,157] Trial 388 pruned. [I 2021-12-17 15:11:17,990] Trial 389 pruned. [I 2021-12-17 15:11:20,528] Trial 390 pruned. [I 2021-12-17 15:11:23,638] Trial 391 pruned. [I 2021-12-17 15:11:32,110] Trial 392 pruned. [I 2021-12-17 15:11:34,731] Trial 393 pruned. [I 2021-12-17 15:11:40,731] Trial 394 pruned. [I 2021-12-17 15:11:46,189] Trial 395 pruned. [I 2021-12-17 15:11:49,115] Trial 396 pruned. [I 2021-12-17 15:11:51,798] Trial 397 pruned. [I 2021-12-17 15:11:53,268] Trial 398 pruned. [I 2021-12-17 15:11:53,984] Trial 399 pruned. [I 2021-12-17 15:11:57,074] Trial 400 pruned. [I 2021-12-17 15:11:58,642] Trial 401 pruned. [I 2021-12-17 15:12:04,524] Trial 402 pruned. [I 2021-12-17 15:12:10,003] Trial 403 pruned. [I 2021-12-17 15:12:16,245] Trial 404 pruned. [I 2021-12-17 15:12:18,815] Trial 405 pruned. [I 2021-12-17 15:12:24,516] Trial 406 pruned. [I 2021-12-17 15:12:36,535] Trial 407 pruned. [I 2021-12-17 15:17:06,472] Trial 408 finished with value: 176.974853515625 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 312, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 1, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 265 with value: 163.23626708984375. [I 2021-12-17 15:17:14,811] Trial 409 pruned. 
[I 2021-12-17 15:17:17,830] Trial 410 pruned. [I 2021-12-17 15:19:36,919] Trial 411 finished with value: 175.72442626953125 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 306, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 265 with value: 163.23626708984375. [I 2021-12-17 15:19:40,065] Trial 412 pruned. [I 2021-12-17 15:19:41,526] Trial 413 pruned. [I 2021-12-17 15:22:12,206] Trial 414 finished with value: 162.58702087402344 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 354, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 414 with value: 162.58702087402344. [I 2021-12-17 15:22:15,101] Trial 415 pruned. [I 2021-12-17 15:22:24,226] Trial 416 pruned. [I 2021-12-17 15:24:44,084] Trial 417 finished with value: 205.28236389160156 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 308, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 414 with value: 162.58702087402344. [I 2021-12-17 15:25:03,072] Trial 418 pruned. [I 2021-12-17 15:25:06,289] Trial 419 pruned. [I 2021-12-17 15:25:09,191] Trial 420 pruned. [I 2021-12-17 15:25:15,266] Trial 421 pruned. [I 2021-12-17 15:25:16,128] Trial 422 pruned. 
[I 2021-12-17 15:27:26,726] Trial 423 finished with value: 169.7684783935547 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 272, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 414 with value: 162.58702087402344. [I 2021-12-17 15:27:27,953] Trial 424 pruned. [I 2021-12-17 15:27:31,829] Trial 425 pruned. [I 2021-12-17 15:27:33,194] Trial 426 pruned. [I 2021-12-17 15:27:36,854] Trial 427 pruned. [I 2021-12-17 15:27:41,326] Trial 428 pruned. [I 2021-12-17 15:27:44,427] Trial 429 pruned. [I 2021-12-17 15:29:41,558] Trial 430 finished with value: 165.0712432861328 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 222, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 414 with value: 162.58702087402344. [I 2021-12-17 15:29:42,663] Trial 431 pruned. [I 2021-12-17 15:29:44,979] Trial 432 pruned. [I 2021-12-17 15:29:50,914] Trial 433 pruned. [I 2021-12-17 15:29:53,378] Trial 434 pruned. [I 2021-12-17 15:29:55,917] Trial 435 pruned. [I 2021-12-17 15:29:57,332] Trial 436 pruned. [I 2021-12-17 15:32:20,078] Trial 437 finished with value: 168.35597229003906 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 324, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 414 with value: 162.58702087402344. [I 2021-12-17 15:32:22,955] Trial 438 pruned. [I 2021-12-17 15:32:25,717] Trial 439 pruned. 
[I 2021-12-17 15:35:05,261] Trial 440 finished with value: 168.25392150878906 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 388, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 414 with value: 162.58702087402344. [I 2021-12-17 15:35:08,561] Trial 441 pruned. [I 2021-12-17 15:35:11,784] Trial 442 pruned. [I 2021-12-17 15:35:32,073] Trial 443 pruned. [I 2021-12-17 15:35:36,866] Trial 444 pruned. [I 2021-12-17 15:35:42,970] Trial 445 pruned. [I 2021-12-17 15:35:43,478] Trial 446 pruned. [I 2021-12-17 15:35:46,573] Trial 447 pruned. [I 2021-12-17 15:37:45,444] Trial 448 finished with value: 168.50723266601562 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 226, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 414 with value: 162.58702087402344. [I 2021-12-17 15:40:29,739] Trial 449 finished with value: 165.34286499023438 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 408, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 414 with value: 162.58702087402344. [I 2021-12-17 15:40:34,534] Trial 450 pruned. [I 2021-12-17 15:40:36,187] Trial 451 pruned. 
[I 2021-12-17 15:42:32,236] Trial 452 finished with value: 187.89251708984375 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 216, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 414 with value: 162.58702087402344. [I 2021-12-17 15:42:42,636] Trial 453 pruned. [I 2021-12-17 15:45:30,551] Trial 454 finished with value: 167.78213500976562 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 422, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 414 with value: 162.58702087402344. [I 2021-12-17 15:48:15,917] Trial 455 finished with value: 167.8356475830078 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 414, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 414 with value: 162.58702087402344. [I 2021-12-17 15:48:19,320] Trial 456 pruned. [I 2021-12-17 15:51:11,153] Trial 457 finished with value: 166.21771240234375 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 438, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 414 with value: 162.58702087402344. [I 2021-12-17 15:51:18,085] Trial 458 pruned. [I 2021-12-17 15:51:24,907] Trial 459 pruned. [I 2021-12-17 15:51:26,631] Trial 460 pruned. [I 2021-12-17 15:51:28,462] Trial 461 pruned. [I 2021-12-17 15:51:30,278] Trial 462 pruned. 
[I 2021-12-17 15:51:33,337] Trial 463 pruned. [I 2021-12-17 15:51:35,075] Trial 464 pruned. [I 2021-12-17 15:51:36,209] Trial 465 pruned. [I 2021-12-17 15:51:37,873] Trial 466 pruned. [I 2021-12-17 15:51:41,345] Trial 467 pruned. [I 2021-12-17 15:54:25,637] Trial 468 finished with value: 176.52389526367188 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 410, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 414 with value: 162.58702087402344. [I 2021-12-17 15:54:27,252] Trial 469 pruned. [I 2021-12-17 15:54:27,836] Trial 470 pruned. [I 2021-12-17 15:54:29,544] Trial 471 pruned. [I 2021-12-17 15:54:34,398] Trial 472 pruned. [I 2021-12-17 15:54:35,610] Trial 473 pruned. [I 2021-12-17 15:54:39,168] Trial 474 pruned. [I 2021-12-17 15:54:39,480] Trial 475 pruned. [I 2021-12-17 15:54:42,799] Trial 476 pruned. [I 2021-12-17 15:57:45,523] Trial 477 finished with value: 174.20947265625 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 478, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 414 with value: 162.58702087402344. [I 2021-12-17 15:57:49,162] Trial 478 pruned. [I 2021-12-17 15:57:52,848] Trial 479 pruned. [I 2021-12-17 15:57:56,280] Trial 480 pruned. [I 2021-12-17 15:57:58,098] Trial 481 pruned. [I 2021-12-17 16:01:07,389] Trial 482 finished with value: 159.2676239013672 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 504, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. 
Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 16:01:11,146] Trial 483 pruned. [I 2021-12-17 16:01:28,090] Trial 484 pruned. [I 2021-12-17 16:01:29,936] Trial 485 pruned. [I 2021-12-17 16:01:31,968] Trial 486 pruned. [I 2021-12-17 16:01:35,659] Trial 487 pruned. [I 2021-12-17 16:01:37,523] Trial 488 pruned. [I 2021-12-17 16:01:41,314] Trial 489 pruned. [I 2021-12-17 16:01:45,229] Trial 490 pruned. [I 2021-12-17 16:01:48,670] Trial 491 pruned. [I 2021-12-17 16:01:50,621] Trial 492 pruned. [I 2021-12-17 16:01:54,483] Trial 493 pruned. [I 2021-12-17 16:01:56,379] Trial 494 pruned. [I 2021-12-17 16:01:56,920] Trial 495 pruned. [I 2021-12-17 16:02:00,687] Trial 496 pruned. [I 2021-12-17 16:02:06,969] Trial 497 pruned. [I 2021-12-17 16:02:07,294] Trial 498 pruned. [I 2021-12-17 16:02:10,866] Trial 499 pruned. [I 2021-12-17 16:02:14,994] Trial 500 pruned. [I 2021-12-17 16:02:16,525] Trial 501 pruned. [I 2021-12-17 16:02:18,294] Trial 502 pruned. [I 2021-12-17 16:02:19,526] Trial 503 pruned. [I 2021-12-17 16:02:21,250] Trial 504 pruned. [I 2021-12-17 16:02:26,102] Trial 505 pruned. [I 2021-12-17 16:02:28,032] Trial 506 pruned. [I 2021-12-17 16:02:34,183] Trial 507 pruned. [I 2021-12-17 16:02:35,715] Trial 508 pruned. [I 2021-12-17 16:02:38,865] Trial 509 pruned. [I 2021-12-17 16:02:40,396] Trial 510 pruned. [I 2021-12-17 16:02:43,828] Trial 511 pruned. [I 2021-12-17 16:02:49,345] Trial 512 pruned. [I 2021-12-17 16:05:31,623] Trial 513 finished with value: 162.505859375 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 400, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 16:05:36,443] Trial 514 pruned. [I 2021-12-17 16:05:39,728] Trial 515 pruned. [I 2021-12-17 16:05:41,266] Trial 516 pruned. 
[I 2021-12-17 16:05:41,833] Trial 517 pruned. [I 2021-12-17 16:05:43,379] Trial 518 pruned. [I 2021-12-17 16:05:46,984] Trial 519 pruned. [I 2021-12-17 16:05:49,165] Trial 520 pruned. [I 2021-12-17 16:05:55,374] Trial 521 pruned. [I 2021-12-17 16:05:55,769] Trial 522 pruned. [I 2021-12-17 16:05:57,287] Trial 523 pruned. [I 2021-12-17 16:05:59,007] Trial 524 pruned. [I 2021-12-17 16:06:04,878] Trial 525 pruned. [I 2021-12-17 16:06:06,492] Trial 526 pruned. [I 2021-12-17 16:06:11,452] Trial 527 pruned. [I 2021-12-17 16:06:13,315] Trial 528 pruned. [I 2021-12-17 16:06:16,175] Trial 529 pruned. [I 2021-12-17 16:06:19,312] Trial 530 pruned. [I 2021-12-17 16:08:45,769] Trial 531 finished with value: 164.28286743164062 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 340, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 16:10:31,525] Trial 532 finished with value: 163.90646362304688 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 182, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 16:10:32,524] Trial 533 pruned. [I 2021-12-17 16:10:33,647] Trial 534 pruned. [I 2021-12-17 16:10:34,688] Trial 535 pruned. [I 2021-12-17 16:10:35,756] Trial 536 pruned. [I 2021-12-17 16:10:36,876] Trial 537 pruned. [I 2021-12-17 16:10:38,178] Trial 538 pruned. [I 2021-12-17 16:10:38,842] Trial 539 pruned. [I 2021-12-17 16:10:40,522] Trial 540 pruned. [I 2021-12-17 16:10:41,470] Trial 541 pruned. [I 2021-12-17 16:10:42,703] Trial 542 pruned. [I 2021-12-17 16:10:43,895] Trial 543 pruned. 
[I 2021-12-17 16:10:45,241] Trial 544 pruned. [I 2021-12-17 16:10:49,365] Trial 545 pruned. [I 2021-12-17 16:10:50,471] Trial 546 pruned. [I 2021-12-17 16:10:51,056] Trial 547 pruned. [I 2021-12-17 16:10:51,270] Trial 548 pruned. [I 2021-12-17 16:10:56,166] Trial 549 pruned. [I 2021-12-17 16:10:57,167] Trial 550 pruned. [I 2021-12-17 16:10:58,347] Trial 551 pruned. [I 2021-12-17 16:11:01,410] Trial 552 pruned. [I 2021-12-17 16:11:05,465] Trial 553 pruned. [I 2021-12-17 16:11:08,696] Trial 554 pruned. [I 2021-12-17 16:11:10,153] Trial 555 pruned. [I 2021-12-17 16:11:38,456] Trial 556 pruned. [I 2021-12-17 16:11:39,654] Trial 557 pruned. [I 2021-12-17 16:11:43,491] Trial 558 pruned. [I 2021-12-17 16:11:46,909] Trial 559 pruned. [I 2021-12-17 16:11:47,815] Trial 560 pruned. [I 2021-12-17 16:11:50,775] Trial 561 pruned. [I 2021-12-17 16:11:53,853] Trial 562 pruned. [I 2021-12-17 16:11:58,078] Trial 563 pruned. [I 2021-12-17 16:11:58,978] Trial 564 pruned. [I 2021-12-17 16:14:39,301] Trial 565 finished with value: 170.72938537597656 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 392, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 16:14:44,146] Trial 566 pruned. [I 2021-12-17 16:14:47,605] Trial 567 pruned. [I 2021-12-17 16:17:33,040] Trial 568 finished with value: 168.50872802734375 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 414, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 16:17:33,636] Trial 569 pruned. [I 2021-12-17 16:17:35,419] Trial 570 pruned. 
[I 2021-12-17 16:17:38,999] Trial 571 pruned. [I 2021-12-17 16:17:42,558] Trial 572 pruned. [I 2021-12-17 16:17:42,967] Trial 573 pruned. [I 2021-12-17 16:17:46,131] Trial 574 pruned. [I 2021-12-17 16:17:47,864] Trial 575 pruned. [I 2021-12-17 16:17:51,170] Trial 576 pruned. [I 2021-12-17 16:17:55,939] Trial 577 pruned. [I 2021-12-17 16:17:57,511] Trial 578 pruned. [I 2021-12-17 16:18:01,515] Trial 579 pruned. [I 2021-12-17 16:18:04,996] Trial 580 pruned. [I 2021-12-17 16:18:06,554] Trial 581 pruned. [I 2021-12-17 16:18:08,665] Trial 582 pruned. [I 2021-12-17 16:18:13,770] Trial 583 pruned. [I 2021-12-17 16:18:15,302] Trial 584 pruned. [I 2021-12-17 16:18:16,910] Trial 585 pruned. [I 2021-12-17 16:18:20,939] Trial 586 pruned. [I 2021-12-17 16:18:22,702] Trial 587 pruned. [I 2021-12-17 16:18:23,807] Trial 588 pruned. [I 2021-12-17 16:18:25,102] Trial 589 pruned. [I 2021-12-17 16:18:25,943] Trial 590 pruned. [I 2021-12-17 16:18:26,826] Trial 591 pruned. [I 2021-12-17 16:18:28,295] Trial 592 pruned. [I 2021-12-17 16:18:28,803] Trial 593 pruned. [I 2021-12-17 16:18:32,967] Trial 594 pruned. [I 2021-12-17 16:21:08,765] Trial 595 finished with value: 162.86712646484375 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 376, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 16:21:21,624] Trial 596 pruned. [I 2021-12-17 16:21:23,249] Trial 597 pruned. [I 2021-12-17 16:21:23,649] Trial 598 pruned. [I 2021-12-17 16:21:27,214] Trial 599 pruned. [I 2021-12-17 16:21:30,295] Trial 600 pruned. [I 2021-12-17 16:21:36,402] Trial 601 pruned. [I 2021-12-17 16:21:38,149] Trial 602 pruned. [I 2021-12-17 16:21:39,707] Trial 603 pruned. 
[I 2021-12-17 16:24:22,989] Trial 604 finished with value: 164.394775390625 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 404, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 16:24:27,917] Trial 605 pruned. [I 2021-12-17 16:24:33,101] Trial 606 pruned. [I 2021-12-17 16:24:36,462] Trial 607 pruned. [I 2021-12-17 16:27:24,329] Trial 608 finished with value: 164.16831970214844 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 420, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 16:27:26,163] Trial 609 pruned. [I 2021-12-17 16:27:29,577] Trial 610 pruned. [I 2021-12-17 16:27:30,313] Trial 611 pruned. [I 2021-12-17 16:27:30,805] Trial 612 pruned. [I 2021-12-17 16:30:12,177] Trial 613 finished with value: 164.89630126953125 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 398, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.2, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 16:30:14,033] Trial 614 pruned. [I 2021-12-17 16:30:15,726] Trial 615 pruned. [I 2021-12-17 16:30:17,414] Trial 616 pruned. [I 2021-12-17 16:30:19,160] Trial 617 pruned. [I 2021-12-17 16:30:22,618] Trial 618 pruned. [I 2021-12-17 16:30:23,206] Trial 619 pruned. [I 2021-12-17 16:30:28,591] Trial 620 pruned. [I 2021-12-17 16:30:32,128] Trial 621 pruned. [I 2021-12-17 16:30:32,523] Trial 622 pruned. 
[I 2021-12-17 16:30:35,738] Trial 623 pruned. [I 2021-12-17 16:30:37,496] Trial 624 pruned. [I 2021-12-17 16:30:40,824] Trial 625 pruned. [I 2021-12-17 16:30:44,243] Trial 626 pruned. [I 2021-12-17 16:30:45,853] Trial 627 pruned. [I 2021-12-17 16:30:50,923] Trial 628 pruned. [I 2021-12-17 16:33:26,071] Trial 629 finished with value: 160.96868896484375 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 374, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 16:33:30,819] Trial 630 pruned. [I 2021-12-17 16:33:32,427] Trial 631 pruned. [I 2021-12-17 16:33:38,619] Trial 632 pruned. [I 2021-12-17 16:33:41,027] Trial 633 pruned. [I 2021-12-17 16:33:44,278] Trial 634 pruned. [I 2021-12-17 16:33:46,160] Trial 635 pruned. [I 2021-12-17 16:33:52,476] Trial 636 pruned. [I 2021-12-17 16:33:53,346] Trial 637 pruned. [I 2021-12-17 16:33:59,857] Trial 638 pruned. [I 2021-12-17 16:34:01,135] Trial 639 pruned. [I 2021-12-17 16:34:03,046] Trial 640 pruned. [I 2021-12-17 16:34:06,059] Trial 641 pruned. [I 2021-12-17 16:34:09,349] Trial 642 pruned. [I 2021-12-17 16:34:09,902] Trial 643 pruned. [I 2021-12-17 16:34:15,361] Trial 644 pruned. [I 2021-12-17 16:34:18,765] Trial 645 pruned. [I 2021-12-17 16:34:23,348] Trial 646 pruned. [I 2021-12-17 16:34:23,774] Trial 647 pruned. [I 2021-12-17 16:34:26,835] Trial 648 pruned. [I 2021-12-17 16:34:33,750] Trial 649 pruned. [I 2021-12-17 16:34:35,379] Trial 650 pruned. [I 2021-12-17 16:34:37,023] Trial 651 pruned. [I 2021-12-17 16:34:41,528] Trial 652 pruned. [I 2021-12-17 16:34:43,145] Trial 653 pruned. [I 2021-12-17 16:34:47,128] Trial 654 pruned. [I 2021-12-17 16:34:49,603] Trial 655 pruned. [I 2021-12-17 16:34:51,809] Trial 656 pruned. [I 2021-12-17 16:34:55,283] Trial 657 pruned. 
[I 2021-12-17 16:34:57,125] Trial 658 pruned. [I 2021-12-17 16:35:00,415] Trial 659 pruned. [I 2021-12-17 16:37:36,026] Trial 660 finished with value: 162.6023406982422 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 364, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 16:37:36,656] Trial 661 pruned. [I 2021-12-17 16:37:39,831] Trial 662 pruned. [I 2021-12-17 16:37:41,361] Trial 663 pruned. [I 2021-12-17 16:37:44,491] Trial 664 pruned. [I 2021-12-17 16:37:49,457] Trial 665 pruned. [I 2021-12-17 16:37:52,463] Trial 666 pruned. [I 2021-12-17 16:37:53,031] Trial 667 pruned. [I 2021-12-17 16:37:54,639] Trial 668 pruned. [I 2021-12-17 16:37:59,223] Trial 669 pruned. [I 2021-12-17 16:38:05,658] Trial 670 pruned. [I 2021-12-17 16:38:05,937] Trial 671 pruned. [I 2021-12-17 16:38:08,231] Trial 672 pruned. [I 2021-12-17 16:38:11,663] Trial 673 pruned. [I 2021-12-17 16:38:13,206] Trial 674 pruned. [I 2021-12-17 16:38:14,786] Trial 675 pruned. [I 2021-12-17 16:38:20,034] Trial 676 pruned. [I 2021-12-17 16:38:20,918] Trial 677 pruned. [I 2021-12-17 16:38:23,916] Trial 678 pruned. [I 2021-12-17 16:41:01,113] Trial 679 finished with value: 166.01651000976562 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 382, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 16:41:04,483] Trial 680 pruned. [I 2021-12-17 16:41:07,710] Trial 681 pruned. [I 2021-12-17 16:41:11,027] Trial 682 pruned. [I 2021-12-17 16:41:12,598] Trial 683 pruned. [I 2021-12-17 16:41:14,399] Trial 684 pruned. 
[I 2021-12-17 16:41:25,204] Trial 685 pruned. [I 2021-12-17 16:41:26,419] Trial 686 pruned. [I 2021-12-17 16:41:28,211] Trial 687 pruned. [I 2021-12-17 16:41:29,014] Trial 688 pruned. [I 2021-12-17 16:41:32,414] Trial 689 pruned. [I 2021-12-17 16:41:35,511] Trial 690 pruned. [I 2021-12-17 16:41:37,173] Trial 691 pruned. [I 2021-12-17 16:41:37,799] Trial 692 pruned. [I 2021-12-17 16:41:42,780] Trial 693 pruned. [I 2021-12-17 16:41:48,075] Trial 694 pruned. [I 2021-12-17 16:41:53,830] Trial 695 pruned. [I 2021-12-17 16:41:54,217] Trial 696 pruned. [I 2021-12-17 16:41:55,780] Trial 697 pruned. [I 2021-12-17 16:44:30,834] Trial 698 finished with value: 166.31822204589844 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 372, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 16:44:32,351] Trial 699 pruned. [I 2021-12-17 16:44:33,937] Trial 700 pruned. [I 2021-12-17 16:44:37,100] Trial 701 pruned. [I 2021-12-17 16:44:38,642] Trial 702 pruned. [I 2021-12-17 16:44:40,405] Trial 703 pruned. [I 2021-12-17 16:44:42,487] Trial 704 pruned. [I 2021-12-17 16:44:44,406] Trial 705 pruned. [I 2021-12-17 16:44:50,301] Trial 706 pruned. [I 2021-12-17 16:44:55,208] Trial 707 pruned. [I 2021-12-17 16:44:58,163] Trial 708 pruned. [I 2021-12-17 16:44:59,983] Trial 709 pruned. [I 2021-12-17 16:45:00,839] Trial 710 pruned. [I 2021-12-17 16:45:04,071] Trial 711 pruned. [I 2021-12-17 16:45:09,215] Trial 712 pruned. [I 2021-12-17 16:45:10,796] Trial 713 pruned. [I 2021-12-17 16:45:12,498] Trial 714 pruned. [I 2021-12-17 16:45:18,038] Trial 715 pruned. [I 2021-12-17 16:45:18,698] Trial 716 pruned. [I 2021-12-17 16:45:21,636] Trial 717 pruned. [I 2021-12-17 16:45:23,265] Trial 718 pruned. [I 2021-12-17 16:45:24,844] Trial 719 pruned. 
[I 2021-12-17 16:45:25,245] Trial 720 pruned. [I 2021-12-17 16:45:27,245] Trial 721 pruned. [I 2021-12-17 16:45:28,103] Trial 722 pruned. [I 2021-12-17 16:45:31,054] Trial 723 pruned. [I 2021-12-17 16:45:40,404] Trial 724 pruned. [I 2021-12-17 16:48:36,848] Trial 725 finished with value: 162.3057861328125 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 450, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 16:48:38,667] Trial 726 pruned. [I 2021-12-17 16:48:40,545] Trial 727 pruned. [I 2021-12-17 16:48:43,134] Trial 728 pruned. [I 2021-12-17 16:48:46,891] Trial 729 pruned. [I 2021-12-17 16:48:48,782] Trial 730 pruned. [I 2021-12-17 16:48:52,359] Trial 731 pruned. [I 2021-12-17 16:48:53,174] Trial 732 pruned. [I 2021-12-17 16:48:54,141] Trial 733 pruned. [I 2021-12-17 16:48:55,905] Trial 734 pruned. [I 2021-12-17 16:48:57,008] Trial 735 pruned. [I 2021-12-17 16:49:00,583] Trial 736 pruned. [I 2021-12-17 16:49:03,579] Trial 737 pruned. [I 2021-12-17 16:49:05,600] Trial 738 pruned. [I 2021-12-17 16:49:07,175] Trial 739 pruned. [I 2021-12-17 16:49:08,964] Trial 740 pruned. [I 2021-12-17 16:49:09,490] Trial 741 pruned. [I 2021-12-17 16:49:11,353] Trial 742 pruned. [I 2021-12-17 16:49:13,292] Trial 743 pruned. [I 2021-12-17 16:49:14,947] Trial 744 pruned. [I 2021-12-17 16:49:15,175] Trial 745 pruned. [I 2021-12-17 16:49:16,706] Trial 746 pruned. [I 2021-12-17 16:49:19,994] Trial 747 pruned. [I 2021-12-17 16:49:24,726] Trial 748 pruned. [I 2021-12-17 16:49:26,400] Trial 749 pruned. [I 2021-12-17 16:49:28,457] Trial 750 pruned. [I 2021-12-17 16:49:30,243] Trial 751 pruned. 
[I 2021-12-17 16:51:59,045] Trial 752 finished with value: 167.55625915527344 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 350, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 16:52:00,555] Trial 753 pruned. [I 2021-12-17 16:52:02,372] Trial 754 pruned. [I 2021-12-17 16:52:03,812] Trial 755 pruned. [I 2021-12-17 16:52:06,771] Trial 756 pruned. [I 2021-12-17 16:52:11,412] Trial 757 pruned. [I 2021-12-17 16:52:12,874] Trial 758 pruned. [I 2021-12-17 16:52:13,749] Trial 759 pruned. [I 2021-12-17 16:52:18,203] Trial 760 pruned. [I 2021-12-17 16:54:53,274] Trial 761 finished with value: 170.00071716308594 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 372, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 16:54:54,768] Trial 762 pruned. [I 2021-12-17 16:54:55,824] Trial 763 pruned. [I 2021-12-17 16:54:58,887] Trial 764 pruned. [I 2021-12-17 16:55:00,504] Trial 765 pruned. [I 2021-12-17 16:55:02,155] Trial 766 pruned. [I 2021-12-17 16:55:02,830] Trial 767 pruned. [I 2021-12-17 16:55:06,020] Trial 768 pruned. [I 2021-12-17 16:55:06,392] Trial 769 pruned. [I 2021-12-17 16:55:07,810] Trial 770 pruned. [I 2021-12-17 16:55:09,713] Trial 771 pruned. [I 2021-12-17 16:55:10,919] Trial 772 pruned. [I 2021-12-17 16:55:12,534] Trial 773 pruned. [I 2021-12-17 16:55:14,056] Trial 774 pruned. [I 2021-12-17 16:55:15,631] Trial 775 pruned. [I 2021-12-17 16:55:17,252] Trial 776 pruned. 
[I 2021-12-17 16:57:53,795] Trial 777 finished with value: 183.32510375976562 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 378, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 16:57:56,771] Trial 778 pruned. [I 2021-12-17 16:57:58,906] Trial 779 pruned. [I 2021-12-17 16:58:00,853] Trial 780 pruned. [I 2021-12-17 16:58:03,554] Trial 781 pruned. [I 2021-12-17 16:58:08,586] Trial 782 pruned. [I 2021-12-17 16:58:10,048] Trial 783 pruned. [I 2021-12-17 16:58:13,148] Trial 784 pruned. [I 2021-12-17 16:58:14,807] Trial 785 pruned. [I 2021-12-17 16:58:21,079] Trial 786 pruned. [I 2021-12-17 16:58:23,889] Trial 787 pruned. [I 2021-12-17 16:58:25,956] Trial 788 pruned. [I 2021-12-17 16:59:49,427] Trial 789 finished with value: 164.94981384277344 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 406, 'out_channel': 32, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 16:59:49,756] Trial 790 pruned. [I 2021-12-17 16:59:50,634] Trial 791 pruned. [I 2021-12-17 16:59:53,384] Trial 792 pruned. [I 2021-12-17 16:59:55,141] Trial 793 pruned. [I 2021-12-17 16:59:56,016] Trial 794 pruned. [I 2021-12-17 16:59:56,252] Trial 795 pruned. [I 2021-12-17 16:59:57,109] Trial 796 pruned. [I 2021-12-17 16:59:58,851] Trial 797 pruned. [I 2021-12-17 17:00:04,423] Trial 798 pruned. [I 2021-12-17 17:00:05,675] Trial 799 pruned. [I 2021-12-17 17:00:06,412] Trial 800 pruned. [I 2021-12-17 17:00:07,322] Trial 801 pruned. [I 2021-12-17 17:00:48,298] Trial 802 pruned. [I 2021-12-17 17:00:49,151] Trial 803 pruned. 
[I 2021-12-17 17:00:50,059] Trial 804 pruned. [I 2021-12-17 17:00:51,014] Trial 805 pruned. [I 2021-12-17 17:00:52,787] Trial 806 pruned. [I 2021-12-17 17:00:58,995] Trial 807 pruned. [I 2021-12-17 17:00:59,920] Trial 808 pruned. [I 2021-12-17 17:01:04,572] Trial 809 pruned. [I 2021-12-17 17:01:05,531] Trial 810 pruned. [I 2021-12-17 17:01:10,395] Trial 811 pruned. [I 2021-12-17 17:01:14,292] Trial 812 pruned. [I 2021-12-17 17:01:16,006] Trial 813 pruned. [I 2021-12-17 17:01:16,573] Trial 814 pruned. [I 2021-12-17 17:01:18,096] Trial 815 pruned. [I 2021-12-17 17:01:19,176] Trial 816 pruned. [I 2021-12-17 17:01:22,842] Trial 817 pruned. [I 2021-12-17 17:01:23,248] Trial 818 pruned. [I 2021-12-17 17:01:24,850] Trial 819 pruned. [I 2021-12-17 17:01:26,957] Trial 820 pruned. [I 2021-12-17 17:01:30,670] Trial 821 pruned. [I 2021-12-17 17:01:32,203] Trial 822 pruned. [I 2021-12-17 17:01:33,853] Trial 823 pruned. [I 2021-12-17 17:01:35,477] Trial 824 pruned. [I 2021-12-17 17:01:39,098] Trial 825 pruned. [I 2021-12-17 17:01:40,166] Trial 826 pruned. [I 2021-12-17 17:01:46,783] Trial 827 pruned. [I 2021-12-17 17:01:48,386] Trial 828 pruned. [I 2021-12-17 17:01:51,273] Trial 829 pruned. [I 2021-12-17 17:01:54,646] Trial 830 pruned. [I 2021-12-17 17:01:56,275] Trial 831 pruned. [I 2021-12-17 17:01:57,121] Trial 832 pruned. [I 2021-12-17 17:01:58,802] Trial 833 pruned. [I 2021-12-17 17:02:01,890] Trial 834 pruned. [I 2021-12-17 17:02:03,408] Trial 835 pruned. [I 2021-12-17 17:02:04,561] Trial 836 pruned. [I 2021-12-17 17:02:05,647] Trial 837 pruned. [I 2021-12-17 17:02:07,295] Trial 838 pruned. [I 2021-12-17 17:02:08,499] Trial 839 pruned. [I 2021-12-17 17:02:09,987] Trial 840 pruned. [I 2021-12-17 17:02:10,534] Trial 841 pruned. [I 2021-12-17 17:02:10,979] Trial 842 pruned. [I 2021-12-17 17:02:12,600] Trial 843 pruned. 
[I 2021-12-17 17:04:56,491] Trial 844 finished with value: 159.85105895996094 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 406, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 17:04:58,278] Trial 845 pruned. [I 2021-12-17 17:05:01,679] Trial 846 pruned. [I 2021-12-17 17:05:03,473] Trial 847 pruned. [I 2021-12-17 17:05:10,378] Trial 848 pruned. [I 2021-12-17 17:05:12,048] Trial 849 pruned. [I 2021-12-17 17:05:15,395] Trial 850 pruned. [I 2021-12-17 17:05:16,391] Trial 851 pruned. [I 2021-12-17 17:05:23,560] Trial 852 pruned. [I 2021-12-17 17:05:26,870] Trial 853 pruned. [I 2021-12-17 17:05:33,560] Trial 854 pruned. [I 2021-12-17 17:05:35,414] Trial 855 pruned. [I 2021-12-17 17:05:36,520] Trial 856 pruned. [I 2021-12-17 17:05:42,970] Trial 857 pruned. [I 2021-12-17 17:05:48,141] Trial 858 pruned. [I 2021-12-17 17:05:54,705] Trial 859 pruned. [I 2021-12-17 17:05:55,895] Trial 860 pruned. [I 2021-12-17 17:05:59,339] Trial 861 pruned. [I 2021-12-17 17:06:03,032] Trial 862 pruned. [I 2021-12-17 17:06:04,724] Trial 863 pruned. [I 2021-12-17 17:06:05,417] Trial 864 pruned. [I 2021-12-17 17:06:07,156] Trial 865 pruned. [I 2021-12-17 17:06:10,368] Trial 866 pruned. [I 2021-12-17 17:06:10,696] Trial 867 pruned. [I 2021-12-17 17:06:15,814] Trial 868 pruned. [I 2021-12-17 17:06:17,394] Trial 869 pruned. [I 2021-12-17 17:06:21,112] Trial 870 pruned. [I 2021-12-17 17:06:25,949] Trial 871 pruned. [I 2021-12-17 17:06:31,861] Trial 872 pruned. [I 2021-12-17 17:06:33,807] Trial 873 pruned. [I 2021-12-17 17:06:37,600] Trial 874 pruned. [I 2021-12-17 17:06:39,187] Trial 875 pruned. [I 2021-12-17 17:06:43,006] Trial 876 pruned. [I 2021-12-17 17:06:44,736] Trial 877 pruned. [I 2021-12-17 17:06:46,746] Trial 878 pruned. 
[I 2021-12-17 17:06:50,847] Trial 879 pruned. [I 2021-12-17 17:06:52,787] Trial 880 pruned. [I 2021-12-17 17:06:57,348] Trial 881 pruned. [I 2021-12-17 17:06:58,597] Trial 882 pruned. [I 2021-12-17 17:07:01,555] Trial 883 pruned. [I 2021-12-17 17:07:08,017] Trial 884 pruned. [I 2021-12-17 17:07:09,626] Trial 885 pruned. [I 2021-12-17 17:07:11,017] Trial 886 pruned. [I 2021-12-17 17:07:15,893] Trial 887 pruned. [I 2021-12-17 17:07:19,445] Trial 888 pruned. [I 2021-12-17 17:07:19,776] Trial 889 pruned. [I 2021-12-17 17:10:20,857] Trial 890 finished with value: 173.23228454589844 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 468, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 17:10:21,247] Trial 891 pruned. [I 2021-12-17 17:10:22,110] Trial 892 pruned. [I 2021-12-17 17:10:25,257] Trial 893 pruned. [I 2021-12-17 17:10:26,358] Trial 894 pruned. [I 2021-12-17 17:10:32,345] Trial 895 pruned. [I 2021-12-17 17:10:33,981] Trial 896 pruned. [I 2021-12-17 17:10:37,556] Trial 897 pruned. [I 2021-12-17 17:10:40,900] Trial 898 pruned. [I 2021-12-17 17:10:43,764] Trial 899 pruned. [I 2021-12-17 17:10:44,942] Trial 900 pruned. [I 2021-12-17 17:10:46,624] Trial 901 pruned. [I 2021-12-17 17:10:49,719] Trial 902 pruned. [I 2021-12-17 17:10:55,955] Trial 903 pruned. [I 2021-12-17 17:10:56,939] Trial 904 pruned. [I 2021-12-17 17:11:00,517] Trial 905 pruned. [I 2021-12-17 17:11:05,612] Trial 906 pruned. [I 2021-12-17 17:11:08,534] Trial 907 pruned. [I 2021-12-17 17:11:20,005] Trial 908 pruned. [I 2021-12-17 17:11:22,228] Trial 909 pruned. [I 2021-12-17 17:11:25,483] Trial 910 pruned. [I 2021-12-17 17:11:28,386] Trial 911 pruned. [I 2021-12-17 17:11:31,998] Trial 912 pruned. [I 2021-12-17 17:11:32,424] Trial 913 pruned. 
[I 2021-12-17 17:11:35,538] Trial 914 pruned. [I 2021-12-17 17:11:35,910] Trial 915 pruned. [I 2021-12-17 17:11:39,412] Trial 916 pruned. [I 2021-12-17 17:11:45,690] Trial 917 pruned. [I 2021-12-17 17:11:47,352] Trial 918 pruned. [I 2021-12-17 17:14:45,661] Trial 919 finished with value: 170.4480438232422 and parameters: {'batch_size': 16, 'n_hdn_layers': 5, 'neurons_HL1': 444, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'relu'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 17:14:48,771] Trial 920 pruned. [I 2021-12-17 17:14:50,533] Trial 921 pruned. [I 2021-12-17 17:14:52,125] Trial 922 pruned. [I 2021-12-17 17:14:54,149] Trial 923 pruned. [I 2021-12-17 17:14:56,211] Trial 924 pruned. [I 2021-12-17 17:14:57,477] Trial 925 pruned. [I 2021-12-17 17:14:58,332] Trial 926 pruned. [I 2021-12-17 17:15:01,767] Trial 927 pruned. [I 2021-12-17 17:15:04,977] Trial 928 pruned. [I 2021-12-17 17:15:05,835] Trial 929 pruned. [I 2021-12-17 17:15:09,080] Trial 930 pruned. [I 2021-12-17 17:15:10,509] Trial 931 pruned. [I 2021-12-17 17:15:13,465] Trial 932 pruned. [I 2021-12-17 17:15:15,101] Trial 933 pruned. [I 2021-12-17 17:15:21,983] Trial 934 pruned. [I 2021-12-17 17:15:22,674] Trial 935 pruned. [I 2021-12-17 17:15:26,330] Trial 936 pruned. [I 2021-12-17 17:15:27,911] Trial 937 pruned. [I 2021-12-17 17:15:28,925] Trial 938 pruned. [I 2021-12-17 17:15:30,384] Trial 939 pruned. [I 2021-12-17 17:15:36,773] Trial 940 pruned. [I 2021-12-17 17:15:37,168] Trial 941 pruned. [I 2021-12-17 17:15:42,361] Trial 942 pruned. [I 2021-12-17 17:15:45,488] Trial 943 pruned. [I 2021-12-17 17:15:48,494] Trial 944 pruned. [I 2021-12-17 17:15:53,360] Trial 945 pruned. [I 2021-12-17 17:15:55,593] Trial 946 pruned. [I 2021-12-17 17:15:58,975] Trial 947 pruned. [I 2021-12-17 17:16:00,976] Trial 948 pruned. 
[I 2021-12-17 17:16:03,833] Trial 949 pruned. [I 2021-12-17 17:16:04,914] Trial 950 pruned. [I 2021-12-17 17:16:07,504] Trial 951 pruned. [I 2021-12-17 17:16:10,626] Trial 952 pruned. [I 2021-12-17 17:16:12,238] Trial 953 pruned. [I 2021-12-17 17:16:13,795] Trial 954 pruned. [I 2021-12-17 17:16:15,019] Trial 955 pruned. [I 2021-12-17 17:16:16,994] Trial 956 pruned. [I 2021-12-17 17:16:21,503] Trial 957 pruned. [I 2021-12-17 17:16:23,250] Trial 958 pruned. [I 2021-12-17 17:16:24,920] Trial 959 pruned. [I 2021-12-17 17:16:27,951] Trial 960 pruned. [I 2021-12-17 17:16:29,099] Trial 961 pruned. [I 2021-12-17 17:16:30,747] Trial 962 pruned. [I 2021-12-17 17:16:32,491] Trial 963 pruned. [I 2021-12-17 17:16:32,870] Trial 964 pruned. [I 2021-12-17 17:16:35,052] Trial 965 pruned. [I 2021-12-17 17:16:36,562] Trial 966 pruned. [I 2021-12-17 17:16:40,261] Trial 967 pruned. [I 2021-12-17 17:16:41,864] Trial 968 pruned. [I 2021-12-17 17:16:45,061] Trial 969 pruned. [I 2021-12-17 17:16:47,904] Trial 970 pruned. [I 2021-12-17 17:16:51,493] Trial 971 pruned. [I 2021-12-17 17:16:54,825] Trial 972 pruned. [I 2021-12-17 17:16:56,419] Trial 973 pruned. [I 2021-12-17 17:16:57,544] Trial 974 pruned. [I 2021-12-17 17:17:00,551] Trial 975 pruned. [I 2021-12-17 17:17:01,905] Trial 976 pruned. [I 2021-12-17 17:17:02,865] Trial 977 pruned. [I 2021-12-17 17:17:03,841] Trial 978 pruned. [I 2021-12-17 17:17:10,300] Trial 979 pruned. [I 2021-12-17 17:17:13,958] Trial 980 pruned. [I 2021-12-17 17:17:15,318] Trial 981 pruned. [I 2021-12-17 17:17:18,888] Trial 982 pruned. [I 2021-12-17 17:17:20,475] Trial 983 pruned. [I 2021-12-17 17:17:21,347] Trial 984 pruned. [I 2021-12-17 17:17:22,886] Trial 985 pruned. [I 2021-12-17 17:17:24,525] Trial 986 pruned. [I 2021-12-17 17:17:26,180] Trial 987 pruned. [I 2021-12-17 17:17:26,681] Trial 988 pruned. [I 2021-12-17 17:17:29,088] Trial 989 pruned. [I 2021-12-17 17:17:32,229] Trial 990 pruned. [I 2021-12-17 17:17:35,590] Trial 991 pruned. 
[I 2021-12-17 17:17:37,720] Trial 992 pruned. [I 2021-12-17 17:17:39,638] Trial 993 pruned. [I 2021-12-17 17:17:43,695] Trial 994 pruned. [I 2021-12-17 17:17:47,156] Trial 995 pruned. [I 2021-12-17 17:17:50,444] Trial 996 pruned. [I 2021-12-17 17:17:55,812] Trial 997 pruned. [I 2021-12-17 17:17:58,908] Trial 998 pruned. [I 2021-12-17 17:18:00,681] Trial 999 pruned. [I 2021-12-17 17:18:02,246] Trial 1000 pruned. [I 2021-12-17 17:18:02,742] Trial 1001 pruned. [I 2021-12-17 17:18:04,459] Trial 1002 pruned. [I 2021-12-17 17:18:07,354] Trial 1003 pruned. [I 2021-12-17 17:18:11,972] Trial 1004 pruned. [I 2021-12-17 17:18:12,981] Trial 1005 pruned. [I 2021-12-17 17:18:14,438] Trial 1006 pruned. [I 2021-12-17 17:18:15,783] Trial 1007 pruned. [I 2021-12-17 17:18:18,118] Trial 1008 pruned. [I 2021-12-17 17:18:18,828] Trial 1009 pruned. [I 2021-12-17 17:18:22,965] Trial 1010 pruned. [I 2021-12-17 17:18:23,418] Trial 1011 pruned. [I 2021-12-17 17:18:24,268] Trial 1012 pruned. [I 2021-12-17 17:18:26,989] Trial 1013 pruned. [I 2021-12-17 17:18:28,208] Trial 1014 pruned. [I 2021-12-17 17:18:31,196] Trial 1015 pruned. [I 2021-12-17 17:18:38,807] Trial 1016 pruned. [I 2021-12-17 17:18:42,430] Trial 1017 pruned. [I 2021-12-17 17:18:44,055] Trial 1018 pruned. [I 2021-12-17 17:18:45,475] Trial 1019 pruned. [I 2021-12-17 17:18:47,360] Trial 1020 pruned. [I 2021-12-17 17:18:49,135] Trial 1021 pruned. [I 2021-12-17 17:18:50,859] Trial 1022 pruned. [I 2021-12-17 17:18:52,375] Trial 1023 pruned. [I 2021-12-17 17:18:55,349] Trial 1024 pruned. [I 2021-12-17 17:18:56,889] Trial 1025 pruned. [I 2021-12-17 17:18:58,172] Trial 1026 pruned. [I 2021-12-17 17:18:59,865] Trial 1027 pruned. [I 2021-12-17 17:19:01,948] Trial 1028 pruned. [I 2021-12-17 17:19:03,580] Trial 1029 pruned. [I 2021-12-17 17:19:10,683] Trial 1030 pruned. [I 2021-12-17 17:19:12,360] Trial 1031 pruned. [I 2021-12-17 17:19:13,103] Trial 1032 pruned. [I 2021-12-17 17:19:16,120] Trial 1033 pruned. 
[I 2021-12-17 17:19:19,259] Trial 1034 pruned. [I 2021-12-17 17:19:21,283] Trial 1035 pruned. [I 2021-12-17 17:19:21,673] Trial 1036 pruned. [I 2021-12-17 17:19:24,967] Trial 1037 pruned. [I 2021-12-17 17:19:25,898] Trial 1038 pruned. [I 2021-12-17 17:19:27,439] Trial 1039 pruned. [I 2021-12-17 17:19:33,445] Trial 1040 pruned. [I 2021-12-17 17:19:35,371] Trial 1041 pruned. [I 2021-12-17 17:19:36,488] Trial 1042 pruned. [I 2021-12-17 17:22:15,880] Trial 1043 finished with value: 172.83897399902344 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 380, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 17:22:24,088] Trial 1044 pruned. [I 2021-12-17 17:22:27,674] Trial 1045 pruned. [I 2021-12-17 17:22:29,341] Trial 1046 pruned. [I 2021-12-17 17:22:30,930] Trial 1047 pruned. [I 2021-12-17 17:22:32,435] Trial 1048 pruned. [I 2021-12-17 17:22:34,224] Trial 1049 pruned. [I 2021-12-17 17:22:35,204] Trial 1050 pruned. [I 2021-12-17 17:22:36,992] Trial 1051 pruned. [I 2021-12-17 17:22:37,791] Trial 1052 pruned. [I 2021-12-17 17:22:39,289] Trial 1053 pruned. [I 2021-12-17 17:22:42,208] Trial 1054 pruned. [I 2021-12-17 17:25:21,678] Trial 1055 finished with value: 169.59767150878906 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 382, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 17:25:23,148] Trial 1056 pruned. [I 2021-12-17 17:25:24,903] Trial 1057 pruned. [I 2021-12-17 17:25:25,885] Trial 1058 pruned. [I 2021-12-17 17:25:30,963] Trial 1059 pruned. 
[I 2021-12-17 17:25:31,409] Trial 1060 pruned. [I 2021-12-17 17:25:34,618] Trial 1061 pruned. [I 2021-12-17 17:25:37,609] Trial 1062 pruned. [I 2021-12-17 17:25:39,063] Trial 1063 pruned. [I 2021-12-17 17:25:42,484] Trial 1064 pruned. [I 2021-12-17 17:25:46,117] Trial 1065 pruned. [I 2021-12-17 17:25:53,706] Trial 1066 pruned. [I 2021-12-17 17:25:57,125] Trial 1067 pruned. [I 2021-12-17 17:25:59,208] Trial 1068 pruned. [I 2021-12-17 17:26:02,475] Trial 1069 pruned. [I 2021-12-17 17:26:04,031] Trial 1070 pruned. [I 2021-12-17 17:26:06,563] Trial 1071 pruned. [I 2021-12-17 17:26:08,358] Trial 1072 pruned. [I 2021-12-17 17:26:09,894] Trial 1073 pruned. [I 2021-12-17 17:26:11,470] Trial 1074 pruned. [I 2021-12-17 17:26:11,998] Trial 1075 pruned. [I 2021-12-17 17:26:14,539] Trial 1076 pruned. [I 2021-12-17 17:26:17,713] Trial 1077 pruned. [I 2021-12-17 17:26:19,517] Trial 1078 pruned. [I 2021-12-17 17:26:24,635] Trial 1079 pruned. [I 2021-12-17 17:26:25,720] Trial 1080 pruned. [I 2021-12-17 17:26:28,872] Trial 1081 pruned. [I 2021-12-17 17:26:32,656] Trial 1082 pruned. [I 2021-12-17 17:26:33,040] Trial 1083 pruned. [I 2021-12-17 17:26:33,610] Trial 1084 pruned. [I 2021-12-17 17:26:36,428] Trial 1085 pruned. [I 2021-12-17 17:26:39,805] Trial 1086 pruned. [I 2021-12-17 17:26:43,160] Trial 1087 pruned. [I 2021-12-17 17:26:44,057] Trial 1088 pruned. [I 2021-12-17 17:26:51,070] Trial 1089 pruned. [I 2021-12-17 17:26:54,195] Trial 1090 pruned. [I 2021-12-17 17:26:55,241] Trial 1091 pruned. [I 2021-12-17 17:26:56,944] Trial 1092 pruned. [I 2021-12-17 17:26:58,644] Trial 1093 pruned. [I 2021-12-17 17:26:59,795] Trial 1094 pruned. [I 2021-12-17 17:27:01,348] Trial 1095 pruned. [I 2021-12-17 17:27:03,146] Trial 1096 pruned. [I 2021-12-17 17:27:04,116] Trial 1097 pruned. [I 2021-12-17 17:27:07,958] Trial 1098 pruned. [I 2021-12-17 17:27:09,939] Trial 1099 pruned. [I 2021-12-17 17:27:10,809] Trial 1100 pruned. [I 2021-12-17 17:27:14,301] Trial 1101 pruned. 
[I 2021-12-17 17:27:15,902] Trial 1102 pruned. [I 2021-12-17 17:27:18,135] Trial 1103 pruned. [I 2021-12-17 17:27:19,095] Trial 1104 pruned. [I 2021-12-17 17:27:25,050] Trial 1105 pruned. [I 2021-12-17 17:27:28,193] Trial 1106 pruned. [I 2021-12-17 17:27:30,803] Trial 1107 pruned. [I 2021-12-17 17:27:37,448] Trial 1108 pruned. [I 2021-12-17 17:27:37,877] Trial 1109 pruned. [I 2021-12-17 17:27:39,744] Trial 1110 pruned. [I 2021-12-17 17:27:43,682] Trial 1111 pruned. [I 2021-12-17 17:27:46,459] Trial 1112 pruned. [I 2021-12-17 17:27:47,818] Trial 1113 pruned. [I 2021-12-17 17:27:49,515] Trial 1114 pruned. [I 2021-12-17 17:27:51,128] Trial 1115 pruned. [I 2021-12-17 17:27:52,920] Trial 1116 pruned. [I 2021-12-17 17:27:55,921] Trial 1117 pruned. [I 2021-12-17 17:27:59,074] Trial 1118 pruned. [I 2021-12-17 17:28:01,422] Trial 1119 pruned. [I 2021-12-17 17:28:11,053] Trial 1120 pruned. [I 2021-12-17 17:28:12,201] Trial 1121 pruned. [I 2021-12-17 17:28:14,226] Trial 1122 pruned. [I 2021-12-17 17:28:16,725] Trial 1123 pruned. [I 2021-12-17 17:28:18,386] Trial 1124 pruned. [I 2021-12-17 17:28:19,237] Trial 1125 pruned. [I 2021-12-17 17:28:20,512] Trial 1126 pruned. [I 2021-12-17 17:28:23,258] Trial 1127 pruned. [I 2021-12-17 17:28:24,879] Trial 1128 pruned. [I 2021-12-17 17:28:25,465] Trial 1129 pruned. [I 2021-12-17 17:28:29,063] Trial 1130 pruned. [I 2021-12-17 17:28:32,470] Trial 1131 pruned. [I 2021-12-17 17:28:34,174] Trial 1132 pruned. [I 2021-12-17 17:28:34,619] Trial 1133 pruned. [I 2021-12-17 17:28:36,186] Trial 1134 pruned. [I 2021-12-17 17:28:37,820] Trial 1135 pruned. [I 2021-12-17 17:28:41,033] Trial 1136 pruned. [I 2021-12-17 17:28:41,986] Trial 1137 pruned. [I 2021-12-17 17:28:43,741] Trial 1138 pruned. [I 2021-12-17 17:28:45,473] Trial 1139 pruned. [I 2021-12-17 17:28:51,417] Trial 1140 pruned. [I 2021-12-17 17:28:56,980] Trial 1141 pruned. [I 2021-12-17 17:29:03,014] Trial 1142 pruned. [I 2021-12-17 17:29:04,522] Trial 1143 pruned. 
[I 2021-12-17 17:29:06,247] Trial 1144 pruned. [I 2021-12-17 17:29:10,466] Trial 1145 pruned. [I 2021-12-17 17:29:10,968] Trial 1146 pruned. [I 2021-12-17 17:29:14,289] Trial 1147 pruned. [I 2021-12-17 17:29:15,587] Trial 1148 pruned. [I 2021-12-17 17:29:17,145] Trial 1149 pruned. [I 2021-12-17 17:29:18,343] Trial 1150 pruned. [I 2021-12-17 17:29:19,958] Trial 1151 pruned. [I 2021-12-17 17:29:30,770] Trial 1152 pruned. [I 2021-12-17 17:29:34,221] Trial 1153 pruned. [I 2021-12-17 17:29:35,585] Trial 1154 pruned. [I 2021-12-17 17:29:41,782] Trial 1155 pruned. [I 2021-12-17 17:29:42,190] Trial 1156 pruned. [I 2021-12-17 17:29:47,253] Trial 1157 pruned. [I 2021-12-17 17:29:51,439] Trial 1158 pruned. [I 2021-12-17 17:29:52,329] Trial 1159 pruned. [I 2021-12-17 17:29:55,871] Trial 1160 pruned. [I 2021-12-17 17:29:59,080] Trial 1161 pruned. [I 2021-12-17 17:29:59,958] Trial 1162 pruned. [I 2021-12-17 17:30:01,886] Trial 1163 pruned. [I 2021-12-17 17:30:05,309] Trial 1164 pruned. [I 2021-12-17 17:30:07,006] Trial 1165 pruned. [I 2021-12-17 17:30:10,052] Trial 1166 pruned. [I 2021-12-17 17:30:11,202] Trial 1167 pruned. [I 2021-12-17 17:30:14,854] Trial 1168 pruned. [I 2021-12-17 17:30:21,533] Trial 1169 pruned. [I 2021-12-17 17:30:22,228] Trial 1170 pruned. [I 2021-12-17 17:30:27,066] Trial 1171 pruned. [I 2021-12-17 17:30:30,175] Trial 1172 pruned. [I 2021-12-17 17:30:33,932] Trial 1173 pruned. [I 2021-12-17 17:30:35,732] Trial 1174 pruned. [I 2021-12-17 17:30:36,648] Trial 1175 pruned. [I 2021-12-17 17:30:42,792] Trial 1176 pruned. [I 2021-12-17 17:30:43,649] Trial 1177 pruned. [I 2021-12-17 17:30:45,185] Trial 1178 pruned. [I 2021-12-17 17:30:48,758] Trial 1179 pruned. [I 2021-12-17 17:30:49,184] Trial 1180 pruned. [I 2021-12-17 17:30:52,171] Trial 1181 pruned. [I 2021-12-17 17:30:53,972] Trial 1182 pruned. [I 2021-12-17 17:30:58,665] Trial 1183 pruned. [I 2021-12-17 17:31:00,766] Trial 1184 pruned. [I 2021-12-17 17:31:04,054] Trial 1185 pruned. 
[I 2021-12-17 17:31:06,001] Trial 1186 pruned. [I 2021-12-17 17:31:07,490] Trial 1187 pruned. [I 2021-12-17 17:31:09,046] Trial 1188 pruned. [I 2021-12-17 17:31:11,694] Trial 1189 pruned. [I 2021-12-17 17:31:13,188] Trial 1190 pruned. [I 2021-12-17 17:31:16,397] Trial 1191 pruned. [I 2021-12-17 17:31:21,076] Trial 1192 pruned. [I 2021-12-17 17:31:25,040] Trial 1193 pruned. [I 2021-12-17 17:31:26,050] Trial 1194 pruned. [I 2021-12-17 17:31:29,330] Trial 1195 pruned. [I 2021-12-17 17:31:30,855] Trial 1196 pruned. [I 2021-12-17 17:31:37,909] Trial 1197 pruned. [I 2021-12-17 17:31:39,033] Trial 1198 pruned. [I 2021-12-17 17:31:40,829] Trial 1199 pruned. [I 2021-12-17 17:31:42,167] Trial 1200 pruned. [I 2021-12-17 17:31:42,744] Trial 1201 pruned. [I 2021-12-17 17:31:44,479] Trial 1202 pruned. [I 2021-12-17 17:31:47,501] Trial 1203 pruned. [I 2021-12-17 17:31:51,208] Trial 1204 pruned. [I 2021-12-17 17:31:51,704] Trial 1205 pruned. [I 2021-12-17 17:31:54,874] Trial 1206 pruned. [I 2021-12-17 17:31:56,959] Trial 1207 pruned. [I 2021-12-17 17:32:02,678] Trial 1208 pruned. [I 2021-12-17 17:32:06,214] Trial 1209 pruned. [I 2021-12-17 17:32:07,441] Trial 1210 pruned. [I 2021-12-17 17:32:08,612] Trial 1211 pruned. [I 2021-12-17 17:32:10,136] Trial 1212 pruned. [I 2021-12-17 17:32:11,136] Trial 1213 pruned. [I 2021-12-17 17:32:14,387] Trial 1214 pruned. [I 2021-12-17 17:32:15,829] Trial 1215 pruned. [I 2021-12-17 17:32:17,553] Trial 1216 pruned. [I 2021-12-17 17:32:23,629] Trial 1217 pruned. [I 2021-12-17 17:32:26,761] Trial 1218 pruned. [I 2021-12-17 17:32:29,765] Trial 1219 pruned. [I 2021-12-17 17:32:30,717] Trial 1220 pruned. [I 2021-12-17 17:32:32,510] Trial 1221 pruned. [I 2021-12-17 17:32:37,077] Trial 1222 pruned. [I 2021-12-17 17:32:39,484] Trial 1223 pruned. [I 2021-12-17 17:32:41,140] Trial 1224 pruned. [I 2021-12-17 17:32:41,466] Trial 1225 pruned. [I 2021-12-17 17:32:43,221] Trial 1226 pruned. [I 2021-12-17 17:32:50,768] Trial 1227 pruned. 
[I 2021-12-17 17:32:51,195] Trial 1228 pruned. [I 2021-12-17 17:32:54,529] Trial 1229 pruned. [I 2021-12-17 17:32:58,957] Trial 1230 pruned. [I 2021-12-17 17:33:00,400] Trial 1231 pruned. [I 2021-12-17 17:33:02,247] Trial 1232 pruned. [I 2021-12-17 17:35:37,573] Trial 1233 finished with value: 186.1397705078125 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 366, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 17:35:40,973] Trial 1234 pruned. [I 2021-12-17 17:35:45,135] Trial 1235 pruned. [I 2021-12-17 17:35:47,088] Trial 1236 pruned. [I 2021-12-17 17:35:53,515] Trial 1237 pruned. [I 2021-12-17 17:35:55,209] Trial 1238 pruned. [I 2021-12-17 17:35:56,631] Trial 1239 pruned. [I 2021-12-17 17:35:59,799] Trial 1240 pruned. [I 2021-12-17 17:36:06,448] Trial 1241 pruned. [I 2021-12-17 17:36:08,195] Trial 1242 pruned. [I 2021-12-17 17:36:09,153] Trial 1243 pruned. [I 2021-12-17 17:36:10,947] Trial 1244 pruned. [I 2021-12-17 17:36:18,045] Trial 1245 pruned. [I 2021-12-17 17:36:21,105] Trial 1246 pruned. [I 2021-12-17 17:36:22,046] Trial 1247 pruned. [I 2021-12-17 17:36:23,707] Trial 1248 pruned. [I 2021-12-17 17:36:24,053] Trial 1249 pruned. [I 2021-12-17 17:36:26,158] Trial 1250 pruned. [I 2021-12-17 17:36:29,723] Trial 1251 pruned. [I 2021-12-17 17:36:34,297] Trial 1252 pruned. [I 2021-12-17 17:36:34,785] Trial 1253 pruned. [I 2021-12-17 17:36:40,429] Trial 1254 pruned. [I 2021-12-17 17:36:42,282] Trial 1255 pruned. [I 2021-12-17 17:36:47,040] Trial 1256 pruned. [I 2021-12-17 17:36:50,424] Trial 1257 pruned. 
[I 2021-12-17 17:39:29,886] Trial 1258 finished with value: 169.83059692382812 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 382, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 17:42:23,476] Trial 1259 finished with value: 169.62501525878906 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 436, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 17:42:26,382] Trial 1260 pruned. [I 2021-12-17 17:42:28,141] Trial 1261 pruned. [I 2021-12-17 17:42:29,060] Trial 1262 pruned. [I 2021-12-17 17:42:30,197] Trial 1263 pruned. [I 2021-12-17 17:42:33,173] Trial 1264 pruned. [I 2021-12-17 17:42:34,882] Trial 1265 pruned. [I 2021-12-17 17:42:35,971] Trial 1266 pruned. [I 2021-12-17 17:42:37,858] Trial 1267 pruned. [I 2021-12-17 17:42:45,098] Trial 1268 pruned. [I 2021-12-17 17:42:48,283] Trial 1269 pruned. [I 2021-12-17 17:42:49,891] Trial 1270 pruned. [I 2021-12-17 17:42:51,658] Trial 1271 pruned. [I 2021-12-17 17:42:53,947] Trial 1272 pruned. [I 2021-12-17 17:42:55,582] Trial 1273 pruned. [I 2021-12-17 17:42:56,163] Trial 1274 pruned. [I 2021-12-17 17:43:03,032] Trial 1275 pruned. [I 2021-12-17 17:43:04,474] Trial 1276 pruned. [I 2021-12-17 17:43:05,574] Trial 1277 pruned. [I 2021-12-17 17:43:08,745] Trial 1278 pruned. [I 2021-12-17 17:43:11,130] Trial 1279 pruned. [I 2021-12-17 17:43:14,269] Trial 1280 pruned. [I 2021-12-17 17:43:17,797] Trial 1281 pruned. [I 2021-12-17 17:43:24,400] Trial 1282 pruned. [I 2021-12-17 17:43:24,825] Trial 1283 pruned. 
[I 2021-12-17 17:43:26,170] Trial 1284 pruned. [I 2021-12-17 17:43:27,672] Trial 1285 pruned. [I 2021-12-17 17:43:30,997] Trial 1286 pruned. [I 2021-12-17 17:43:34,681] Trial 1287 pruned. [I 2021-12-17 17:43:35,472] Trial 1288 pruned. [I 2021-12-17 17:43:37,028] Trial 1289 pruned. [I 2021-12-17 17:43:38,017] Trial 1290 pruned. [I 2021-12-17 17:43:41,162] Trial 1291 pruned. [I 2021-12-17 17:43:42,889] Trial 1292 pruned. [I 2021-12-17 17:43:46,651] Trial 1293 pruned. [I 2021-12-17 17:43:47,981] Trial 1294 pruned. [I 2021-12-17 17:43:49,789] Trial 1295 pruned. [I 2021-12-17 17:43:51,844] Trial 1296 pruned. [I 2021-12-17 17:43:52,860] Trial 1297 pruned. [I 2021-12-17 17:43:54,442] Trial 1298 pruned. [I 2021-12-17 17:43:58,569] Trial 1299 pruned. [I 2021-12-17 17:43:59,472] Trial 1300 pruned. [I 2021-12-17 17:44:00,049] Trial 1301 pruned. [I 2021-12-17 17:44:01,843] Trial 1302 pruned. [I 2021-12-17 17:44:05,010] Trial 1303 pruned. [I 2021-12-17 17:44:10,464] Trial 1304 pruned. [I 2021-12-17 17:44:15,043] Trial 1305 pruned. [I 2021-12-17 17:44:36,433] Trial 1306 pruned. [I 2021-12-17 17:48:40,965] Trial 1307 finished with value: 163.6394500732422 and parameters: {'batch_size': 16, 'n_hdn_layers': 2, 'neurons_HL1': 722, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 17:48:43,667] Trial 1308 pruned. [I 2021-12-17 17:48:44,964] Trial 1309 pruned. [I 2021-12-17 17:48:47,030] Trial 1310 pruned. [I 2021-12-17 17:48:47,968] Trial 1311 pruned. [I 2021-12-17 17:48:49,282] Trial 1312 pruned. [I 2021-12-17 17:48:54,683] Trial 1313 pruned. [I 2021-12-17 17:48:58,171] Trial 1314 pruned. [I 2021-12-17 17:48:59,735] Trial 1315 pruned. [I 2021-12-17 17:49:04,249] Trial 1316 pruned. [I 2021-12-17 17:49:05,897] Trial 1317 pruned. [I 2021-12-17 17:49:16,478] Trial 1318 pruned. 
[I 2021-12-17 17:49:21,647] Trial 1319 pruned. [I 2021-12-17 17:49:24,342] Trial 1320 pruned. [I 2021-12-17 17:49:26,763] Trial 1321 pruned. [I 2021-12-17 17:49:27,636] Trial 1322 pruned. [I 2021-12-17 17:54:37,266] Trial 1323 finished with value: 165.75975036621094 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 862, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 17:54:43,781] Trial 1324 pruned. [I 2021-12-17 17:54:57,551] Trial 1325 pruned. [I 2021-12-17 17:54:57,864] Trial 1326 pruned. [I 2021-12-17 17:55:00,702] Trial 1327 pruned. [I 2021-12-17 17:55:05,951] Trial 1328 pruned. [I 2021-12-17 17:55:12,534] Trial 1329 pruned. [I 2021-12-17 17:55:27,348] Trial 1330 pruned. [I 2021-12-17 17:55:32,598] Trial 1331 pruned. [I 2021-12-17 17:55:35,012] Trial 1332 pruned. [I 2021-12-17 17:55:41,339] Trial 1333 pruned. [I 2021-12-17 17:55:45,086] Trial 1334 pruned. [I 2021-12-17 17:55:49,990] Trial 1335 pruned. [I 2021-12-17 17:55:53,201] Trial 1336 pruned. [I 2021-12-17 17:55:56,565] Trial 1337 pruned. [I 2021-12-17 17:56:01,633] Trial 1338 pruned. [I 2021-12-17 17:56:05,246] Trial 1339 pruned. [I 2021-12-17 17:56:09,681] Trial 1340 pruned. [I 2021-12-17 17:56:13,067] Trial 1341 pruned. [I 2021-12-17 17:56:14,501] Trial 1342 pruned. [I 2021-12-17 17:56:17,539] Trial 1343 pruned. [I 2021-12-17 17:56:20,037] Trial 1344 pruned. [I 2021-12-17 17:56:26,171] Trial 1345 pruned. [I 2021-12-17 17:56:28,282] Trial 1346 pruned. [I 2021-12-17 17:56:29,975] Trial 1347 pruned. [I 2021-12-17 17:56:34,796] Trial 1348 pruned. [I 2021-12-17 17:56:41,346] Trial 1349 pruned. [I 2021-12-17 17:56:41,978] Trial 1350 pruned. [I 2021-12-17 17:56:43,895] Trial 1351 pruned. [I 2021-12-17 17:56:49,574] Trial 1352 pruned. 
[I 2021-12-17 17:56:56,238] Trial 1353 pruned. [I 2021-12-17 17:57:01,881] Trial 1354 pruned. [I 2021-12-17 17:57:04,617] Trial 1355 pruned. [I 2021-12-17 17:57:11,817] Trial 1356 pruned. [I 2021-12-17 17:57:15,082] Trial 1357 pruned. [I 2021-12-17 17:57:24,939] Trial 1358 pruned. [I 2021-12-17 17:57:26,405] Trial 1359 pruned. [I 2021-12-17 17:57:29,478] Trial 1360 pruned. [I 2021-12-17 17:57:33,696] Trial 1361 pruned. [I 2021-12-17 17:57:40,030] Trial 1362 pruned. [I 2021-12-17 17:57:46,757] Trial 1363 pruned. [I 2021-12-17 17:57:49,165] Trial 1364 pruned. [I 2021-12-17 17:57:50,309] Trial 1365 pruned. [I 2021-12-17 17:58:02,782] Trial 1366 pruned. [I 2021-12-17 17:58:05,381] Trial 1367 pruned. [I 2021-12-17 17:58:10,836] Trial 1368 pruned. [I 2021-12-17 17:58:14,031] Trial 1369 pruned. [I 2021-12-17 17:58:20,686] Trial 1370 pruned. [I 2021-12-17 17:58:21,214] Trial 1371 pruned. [I 2021-12-17 17:58:31,943] Trial 1372 pruned. [I 2021-12-17 17:58:40,897] Trial 1373 pruned. [I 2021-12-17 17:58:41,302] Trial 1374 pruned. [I 2021-12-17 17:58:47,671] Trial 1375 pruned. [I 2021-12-17 17:58:49,367] Trial 1376 pruned. [I 2021-12-17 17:58:50,766] Trial 1377 pruned. [I 2021-12-17 17:58:51,756] Trial 1378 pruned. [I 2021-12-17 17:58:55,058] Trial 1379 pruned. [I 2021-12-17 17:58:56,429] Trial 1380 pruned. [I 2021-12-17 17:58:57,933] Trial 1381 pruned. [I 2021-12-17 18:01:37,588] Trial 1382 finished with value: 180.20721435546875 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 380, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 18:01:38,998] Trial 1383 pruned. [I 2021-12-17 18:01:44,911] Trial 1384 pruned. [I 2021-12-17 18:01:46,001] Trial 1385 pruned. [I 2021-12-17 18:01:47,567] Trial 1386 pruned. 
[I 2021-12-17 18:01:52,537] Trial 1387 pruned. [I 2021-12-17 18:01:54,619] Trial 1388 pruned. [I 2021-12-17 18:01:58,669] Trial 1389 pruned. [I 2021-12-17 18:02:02,070] Trial 1390 pruned. [I 2021-12-17 18:02:03,575] Trial 1391 pruned. [I 2021-12-17 18:02:05,235] Trial 1392 pruned. [I 2021-12-17 18:02:08,676] Trial 1393 pruned. [I 2021-12-17 18:02:17,814] Trial 1394 pruned. [I 2021-12-17 18:02:18,398] Trial 1395 pruned. [I 2021-12-17 18:02:19,908] Trial 1396 pruned. [I 2021-12-17 18:02:21,588] Trial 1397 pruned. [I 2021-12-17 18:02:22,308] Trial 1398 pruned. [I 2021-12-17 18:02:23,583] Trial 1399 pruned. [I 2021-12-17 18:02:24,545] Trial 1400 pruned. [I 2021-12-17 18:02:26,097] Trial 1401 pruned. [I 2021-12-17 18:02:32,414] Trial 1402 pruned. [I 2021-12-17 18:02:35,862] Trial 1403 pruned. [I 2021-12-17 18:02:37,604] Trial 1404 pruned. [I 2021-12-17 18:02:39,628] Trial 1405 pruned. [I 2021-12-17 18:02:44,303] Trial 1406 pruned. [I 2021-12-17 18:02:45,638] Trial 1407 pruned. [I 2021-12-17 18:02:46,647] Trial 1408 pruned. [I 2021-12-17 18:02:49,603] Trial 1409 pruned. [I 2021-12-17 18:02:51,064] Trial 1410 pruned. [I 2021-12-17 18:02:52,769] Trial 1411 pruned. [I 2021-12-17 18:02:54,596] Trial 1412 pruned. [I 2021-12-17 18:02:55,553] Trial 1413 pruned. [I 2021-12-17 18:02:58,759] Trial 1414 pruned. [I 2021-12-17 18:03:04,832] Trial 1415 pruned. [I 2021-12-17 18:03:10,132] Trial 1416 pruned. [I 2021-12-17 18:03:11,882] Trial 1417 pruned. [I 2021-12-17 18:03:13,634] Trial 1418 pruned. [I 2021-12-17 18:03:14,239] Trial 1419 pruned. [I 2021-12-17 18:03:20,537] Trial 1420 pruned. [I 2021-12-17 18:03:22,987] Trial 1421 pruned. [I 2021-12-17 18:03:24,765] Trial 1422 pruned. [I 2021-12-17 18:03:25,134] Trial 1423 pruned. [I 2021-12-17 18:03:28,224] Trial 1424 pruned. [I 2021-12-17 18:03:30,627] Trial 1425 pruned. [I 2021-12-17 18:03:32,300] Trial 1426 pruned. [I 2021-12-17 18:03:34,077] Trial 1427 pruned. [I 2021-12-17 18:03:36,180] Trial 1428 pruned. 
[I 2021-12-17 18:03:37,566] Trial 1429 pruned. [I 2021-12-17 18:03:40,473] Trial 1430 pruned. [I 2021-12-17 18:03:41,774] Trial 1431 pruned. [I 2021-12-17 18:03:48,187] Trial 1432 pruned. [I 2021-12-17 18:03:49,328] Trial 1433 pruned. [I 2021-12-17 18:03:52,745] Trial 1434 pruned. [I 2021-12-17 18:06:43,325] Trial 1435 finished with value: 166.94862365722656 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 422, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 18:06:43,955] Trial 1436 pruned. [I 2021-12-17 18:06:50,442] Trial 1437 pruned. [I 2021-12-17 18:06:53,207] Trial 1438 pruned. [I 2021-12-17 18:06:56,495] Trial 1439 pruned. [I 2021-12-17 18:06:59,677] Trial 1440 pruned. [I 2021-12-17 18:07:01,226] Trial 1441 pruned. [I 2021-12-17 18:07:04,616] Trial 1442 pruned. [I 2021-12-17 18:07:06,801] Trial 1443 pruned. [I 2021-12-17 18:07:07,402] Trial 1444 pruned. [I 2021-12-17 18:07:09,011] Trial 1445 pruned. [I 2021-12-17 18:07:21,854] Trial 1446 pruned. [I 2021-12-17 18:07:22,313] Trial 1447 pruned. [I 2021-12-17 18:07:25,821] Trial 1448 pruned. [I 2021-12-17 18:07:27,581] Trial 1449 pruned. [I 2021-12-17 18:07:30,497] Trial 1450 pruned. [I 2021-12-17 18:07:34,931] Trial 1451 pruned. [I 2021-12-17 18:07:36,540] Trial 1452 pruned. [I 2021-12-17 18:07:38,555] Trial 1453 pruned. [I 2021-12-17 18:07:40,269] Trial 1454 pruned. [I 2021-12-17 18:07:41,678] Trial 1455 pruned. [I 2021-12-17 18:07:45,356] Trial 1456 pruned. [I 2021-12-17 18:07:48,187] Trial 1457 pruned. 
[I 2021-12-17 18:12:47,074] Trial 1458 finished with value: 163.78514099121094 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 830, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 18:12:50,305] Trial 1459 pruned. [I 2021-12-17 18:13:00,047] Trial 1460 pruned. [I 2021-12-17 18:13:01,677] Trial 1461 pruned. [I 2021-12-17 18:13:08,163] Trial 1462 pruned. [I 2021-12-17 18:13:11,468] Trial 1463 pruned. [I 2021-12-17 18:13:17,852] Trial 1464 pruned. [I 2021-12-17 18:13:21,205] Trial 1465 pruned. [I 2021-12-17 18:13:24,477] Trial 1466 pruned. [I 2021-12-17 18:13:31,692] Trial 1467 pruned. [I 2021-12-17 18:13:32,708] Trial 1468 pruned. [I 2021-12-17 18:13:35,780] Trial 1469 pruned. [I 2021-12-17 18:13:39,593] Trial 1470 pruned. [I 2021-12-17 18:13:40,240] Trial 1471 pruned. [I 2021-12-17 18:13:54,360] Trial 1472 pruned. [I 2021-12-17 18:14:01,075] Trial 1473 pruned. [I 2021-12-17 18:14:05,848] Trial 1474 pruned. [I 2021-12-17 18:14:11,946] Trial 1475 pruned. [I 2021-12-17 18:14:14,374] Trial 1476 pruned. [I 2021-12-17 18:14:15,503] Trial 1477 pruned. [I 2021-12-17 18:14:17,014] Trial 1478 pruned. [I 2021-12-17 18:14:20,367] Trial 1479 pruned. [I 2021-12-17 18:14:26,760] Trial 1480 pruned. [I 2021-12-17 18:14:40,339] Trial 1481 pruned. [I 2021-12-17 18:14:43,146] Trial 1482 pruned. [I 2021-12-17 18:14:47,778] Trial 1483 pruned. [I 2021-12-17 18:14:50,858] Trial 1484 pruned. [I 2021-12-17 18:14:54,300] Trial 1485 pruned. [I 2021-12-17 18:14:55,818] Trial 1486 pruned. [I 2021-12-17 18:14:58,936] Trial 1487 pruned. [I 2021-12-17 18:15:00,763] Trial 1488 pruned. [I 2021-12-17 18:15:04,546] Trial 1489 pruned. [I 2021-12-17 18:15:07,832] Trial 1490 pruned. [I 2021-12-17 18:15:08,752] Trial 1491 pruned. 
[I 2021-12-17 18:15:12,848] Trial 1492 pruned. [I 2021-12-17 18:15:13,261] Trial 1493 pruned. [I 2021-12-17 18:15:14,092] Trial 1494 pruned. [I 2021-12-17 18:15:14,906] Trial 1495 pruned. [I 2021-12-17 18:15:19,322] Trial 1496 pruned. [I 2021-12-17 18:15:21,304] Trial 1497 pruned. [I 2021-12-17 18:15:22,838] Trial 1498 pruned. [I 2021-12-17 18:15:26,363] Trial 1499 pruned. [I 2021-12-17 18:15:27,641] Trial 1500 pruned. [I 2021-12-17 18:15:28,556] Trial 1501 pruned. [I 2021-12-17 18:15:30,196] Trial 1502 pruned. [I 2021-12-17 18:15:33,787] Trial 1503 pruned. [I 2021-12-17 18:15:37,161] Trial 1504 pruned. [I 2021-12-17 18:15:40,680] Trial 1505 pruned. [I 2021-12-17 18:15:42,087] Trial 1506 pruned. [I 2021-12-17 18:15:44,937] Trial 1507 pruned. [I 2021-12-17 18:15:47,885] Trial 1508 pruned. [I 2021-12-17 18:15:49,416] Trial 1509 pruned. [I 2021-12-17 18:15:51,344] Trial 1510 pruned. [I 2021-12-17 18:15:57,373] Trial 1511 pruned. [I 2021-12-17 18:16:00,100] Trial 1512 pruned. [I 2021-12-17 18:16:01,086] Trial 1513 pruned. [I 2021-12-17 18:16:02,516] Trial 1514 pruned. [I 2021-12-17 18:16:03,889] Trial 1515 pruned. [I 2021-12-17 18:16:04,564] Trial 1516 pruned. [I 2021-12-17 18:16:07,775] Trial 1517 pruned. [I 2021-12-17 18:16:09,692] Trial 1518 pruned. [I 2021-12-17 18:16:10,161] Trial 1519 pruned. [I 2021-12-17 18:16:19,868] Trial 1520 pruned. [I 2021-12-17 18:16:22,923] Trial 1521 pruned. [I 2021-12-17 18:16:26,673] Trial 1522 pruned. [I 2021-12-17 18:16:32,795] Trial 1523 pruned. [I 2021-12-17 18:16:36,042] Trial 1524 pruned. [I 2021-12-17 18:16:37,540] Trial 1525 pruned. [I 2021-12-17 18:16:39,214] Trial 1526 pruned. [I 2021-12-17 18:16:40,811] Trial 1527 pruned. [I 2021-12-17 18:16:44,310] Trial 1528 pruned. [I 2021-12-17 18:16:51,003] Trial 1529 pruned. [I 2021-12-17 18:16:52,614] Trial 1530 pruned. [I 2021-12-17 18:16:55,502] Trial 1531 pruned. [I 2021-12-17 18:17:01,332] Trial 1532 pruned. [I 2021-12-17 18:17:03,157] Trial 1533 pruned. 
[I 2021-12-17 18:17:04,896] Trial 1534 pruned. [I 2021-12-17 18:17:06,543] Trial 1535 pruned. [I 2021-12-17 18:17:08,758] Trial 1536 pruned. [I 2021-12-17 18:17:09,911] Trial 1537 pruned. [I 2021-12-17 18:17:11,633] Trial 1538 pruned. [I 2021-12-17 18:17:12,488] Trial 1539 pruned. [I 2021-12-17 18:17:13,784] Trial 1540 pruned. [I 2021-12-17 18:17:17,439] Trial 1541 pruned. [I 2021-12-17 18:17:25,676] Trial 1542 pruned. [I 2021-12-17 18:17:27,638] Trial 1543 pruned. [I 2021-12-17 18:17:28,092] Trial 1544 pruned. [I 2021-12-17 18:17:31,680] Trial 1545 pruned. [I 2021-12-17 18:17:34,256] Trial 1546 pruned. [I 2021-12-17 18:17:37,264] Trial 1547 pruned. [I 2021-12-17 18:17:40,556] Trial 1548 pruned. [I 2021-12-17 18:17:47,716] Trial 1549 pruned. [I 2021-12-17 18:17:49,303] Trial 1550 pruned. [I 2021-12-17 18:17:50,163] Trial 1551 pruned. [I 2021-12-17 18:17:51,124] Trial 1552 pruned. [I 2021-12-17 18:17:52,219] Trial 1553 pruned. [I 2021-12-17 18:17:55,433] Trial 1554 pruned. [I 2021-12-17 18:17:57,520] Trial 1555 pruned. [I 2021-12-17 18:17:58,888] Trial 1556 pruned. [I 2021-12-17 18:17:59,778] Trial 1557 pruned. [I 2021-12-17 18:18:01,809] Trial 1558 pruned. [I 2021-12-17 18:18:05,254] Trial 1559 pruned. [I 2021-12-17 18:18:10,247] Trial 1560 pruned. [I 2021-12-17 18:18:14,081] Trial 1561 pruned. [I 2021-12-17 18:18:15,815] Trial 1562 pruned. [I 2021-12-17 18:18:18,943] Trial 1563 pruned. [I 2021-12-17 18:18:19,338] Trial 1564 pruned. [I 2021-12-17 18:18:22,611] Trial 1565 pruned. [I 2021-12-17 18:18:26,331] Trial 1566 pruned. [I 2021-12-17 18:18:29,192] Trial 1567 pruned. [I 2021-12-17 18:18:33,201] Trial 1568 pruned. [I 2021-12-17 18:18:34,494] Trial 1569 pruned. [I 2021-12-17 18:18:34,955] Trial 1570 pruned. [I 2021-12-17 18:18:38,438] Trial 1571 pruned. [I 2021-12-17 18:18:41,647] Trial 1572 pruned. [I 2021-12-17 18:18:44,505] Trial 1573 pruned. [I 2021-12-17 18:18:46,333] Trial 1574 pruned. [I 2021-12-17 18:18:48,086] Trial 1575 pruned. 
[I 2021-12-17 18:18:50,278] Trial 1576 pruned. [I 2021-12-17 18:18:51,537] Trial 1577 pruned. [I 2021-12-17 18:18:53,166] Trial 1578 pruned. [I 2021-12-17 18:18:54,768] Trial 1579 pruned. [I 2021-12-17 18:18:56,574] Trial 1580 pruned. [I 2021-12-17 18:19:01,732] Trial 1581 pruned. [I 2021-12-17 18:19:02,387] Trial 1582 pruned. [I 2021-12-17 18:19:05,335] Trial 1583 pruned. [I 2021-12-17 18:19:06,819] Trial 1584 pruned. [I 2021-12-17 18:19:10,210] Trial 1585 pruned. [I 2021-12-17 18:19:16,869] Trial 1586 pruned. [I 2021-12-17 18:19:18,514] Trial 1587 pruned. [I 2021-12-17 18:19:18,897] Trial 1588 pruned. [I 2021-12-17 18:19:24,527] Trial 1589 pruned. [I 2021-12-17 18:19:26,181] Trial 1590 pruned. [I 2021-12-17 18:19:32,423] Trial 1591 pruned. [I 2021-12-17 18:19:32,908] Trial 1592 pruned. [I 2021-12-17 18:19:37,891] Trial 1593 pruned. [I 2021-12-17 18:19:39,393] Trial 1594 pruned. [I 2021-12-17 18:19:42,527] Trial 1595 pruned. [I 2021-12-17 18:19:46,115] Trial 1596 pruned. [I 2021-12-17 18:19:49,464] Trial 1597 pruned. [I 2021-12-17 18:19:51,106] Trial 1598 pruned. [I 2021-12-17 18:19:54,121] Trial 1599 pruned. [I 2021-12-17 18:19:55,100] Trial 1600 pruned. [I 2021-12-17 18:19:58,541] Trial 1601 pruned. [I 2021-12-17 18:20:01,507] Trial 1602 pruned. [I 2021-12-17 18:20:03,788] Trial 1603 pruned. [I 2021-12-17 18:20:07,979] Trial 1604 pruned. [I 2021-12-17 18:20:09,943] Trial 1605 pruned. [I 2021-12-17 18:20:11,755] Trial 1606 pruned. [I 2021-12-17 18:20:12,685] Trial 1607 pruned. [I 2021-12-17 18:20:14,173] Trial 1608 pruned. [I 2021-12-17 18:20:16,175] Trial 1609 pruned. [I 2021-12-17 18:20:18,003] Trial 1610 pruned. [I 2021-12-17 18:20:19,676] Trial 1611 pruned. [I 2021-12-17 18:20:20,974] Trial 1612 pruned. [I 2021-12-17 18:20:21,461] Trial 1613 pruned. [I 2021-12-17 18:20:24,478] Trial 1614 pruned. [I 2021-12-17 18:20:27,896] Trial 1615 pruned. [I 2021-12-17 18:20:31,838] Trial 1616 pruned. [I 2021-12-17 18:20:33,201] Trial 1617 pruned. 
[I 2021-12-17 18:20:35,250] Trial 1618 pruned. [I 2021-12-17 18:20:35,703] Trial 1619 pruned. [I 2021-12-17 18:20:41,056] Trial 1620 pruned. [I 2021-12-17 18:20:42,718] Trial 1621 pruned. [I 2021-12-17 18:20:46,022] Trial 1622 pruned. [I 2021-12-17 18:20:53,982] Trial 1623 pruned. [I 2021-12-17 18:20:55,843] Trial 1624 pruned. [I 2021-12-17 18:20:57,028] Trial 1625 pruned. [I 2021-12-17 18:21:03,080] Trial 1626 pruned. [I 2021-12-17 18:21:16,130] Trial 1627 pruned. [I 2021-12-17 18:21:17,200] Trial 1628 pruned. [I 2021-12-17 18:21:18,769] Trial 1629 pruned. [I 2021-12-17 18:21:19,694] Trial 1630 pruned. [I 2021-12-17 18:21:24,681] Trial 1631 pruned. [I 2021-12-17 18:21:31,562] Trial 1632 pruned. [I 2021-12-17 18:21:33,348] Trial 1633 pruned. [I 2021-12-17 18:21:35,218] Trial 1634 pruned. [I 2021-12-17 18:21:36,888] Trial 1635 pruned. [I 2021-12-17 18:21:37,868] Trial 1636 pruned. [I 2021-12-17 18:21:41,395] Trial 1637 pruned. [I 2021-12-17 18:21:42,364] Trial 1638 pruned. [I 2021-12-17 18:21:45,113] Trial 1639 pruned. [I 2021-12-17 18:21:45,507] Trial 1640 pruned. [I 2021-12-17 18:21:48,774] Trial 1641 pruned. [I 2021-12-17 18:21:53,658] Trial 1642 pruned. [I 2021-12-17 18:21:55,160] Trial 1643 pruned. [I 2021-12-17 18:21:59,316] Trial 1644 pruned. [I 2021-12-17 18:22:01,507] Trial 1645 pruned. [I 2021-12-17 18:22:05,472] Trial 1646 pruned. [I 2021-12-17 18:22:07,248] Trial 1647 pruned. [I 2021-12-17 18:22:09,944] Trial 1648 pruned. [I 2021-12-17 18:22:11,054] Trial 1649 pruned. [I 2021-12-17 18:22:14,028] Trial 1650 pruned. [I 2021-12-17 18:22:15,739] Trial 1651 pruned. [I 2021-12-17 18:22:16,743] Trial 1652 pruned. [I 2021-12-17 18:22:20,320] Trial 1653 pruned. [I 2021-12-17 18:22:21,300] Trial 1654 pruned. [I 2021-12-17 18:22:24,567] Trial 1655 pruned. [I 2021-12-17 18:22:28,179] Trial 1656 pruned. [I 2021-12-17 18:22:31,891] Trial 1657 pruned. [I 2021-12-17 18:22:34,090] Trial 1658 pruned. [I 2021-12-17 18:22:35,415] Trial 1659 pruned. 
[I 2021-12-17 18:22:36,152] Trial 1660 pruned. [I 2021-12-17 18:22:39,262] Trial 1661 pruned. [I 2021-12-17 18:22:40,976] Trial 1662 pruned. [I 2021-12-17 18:22:42,035] Trial 1663 pruned. [I 2021-12-17 18:22:42,514] Trial 1664 pruned. [I 2021-12-17 18:22:43,832] Trial 1665 pruned. [I 2021-12-17 18:22:48,556] Trial 1666 pruned. [I 2021-12-17 18:22:50,180] Trial 1667 pruned. [I 2021-12-17 18:22:52,008] Trial 1668 pruned. [I 2021-12-17 18:22:56,191] Trial 1669 pruned. [I 2021-12-17 18:22:57,822] Trial 1670 pruned. [I 2021-12-17 18:23:00,379] Trial 1671 pruned. [I 2021-12-17 18:23:02,048] Trial 1672 pruned. [I 2021-12-17 18:23:05,165] Trial 1673 pruned. [I 2021-12-17 18:23:06,587] Trial 1674 pruned. [I 2021-12-17 18:23:07,450] Trial 1675 pruned. [I 2021-12-17 18:23:09,243] Trial 1676 pruned. [I 2021-12-17 18:23:10,308] Trial 1677 pruned. [I 2021-12-17 18:23:13,605] Trial 1678 pruned. [I 2021-12-17 18:23:15,385] Trial 1679 pruned. [I 2021-12-17 18:23:21,200] Trial 1680 pruned. [I 2021-12-17 18:23:22,777] Trial 1681 pruned. [I 2021-12-17 18:23:25,999] Trial 1682 pruned. [I 2021-12-17 18:23:27,973] Trial 1683 pruned. [I 2021-12-17 18:23:28,583] Trial 1684 pruned. [I 2021-12-17 18:23:31,771] Trial 1685 pruned. [I 2021-12-17 18:23:38,217] Trial 1686 pruned. [I 2021-12-17 18:23:40,140] Trial 1687 pruned. [I 2021-12-17 18:23:40,581] Trial 1688 pruned. [I 2021-12-17 18:23:43,901] Trial 1689 pruned. [I 2021-12-17 18:23:47,380] Trial 1690 pruned. [I 2021-12-17 18:23:49,774] Trial 1691 pruned. [I 2021-12-17 18:23:53,195] Trial 1692 pruned. [I 2021-12-17 18:23:55,003] Trial 1693 pruned. [I 2021-12-17 18:23:56,512] Trial 1694 pruned. [I 2021-12-17 18:23:58,115] Trial 1695 pruned. [I 2021-12-17 18:24:01,724] Trial 1696 pruned. [I 2021-12-17 18:24:03,454] Trial 1697 pruned. [I 2021-12-17 18:24:15,717] Trial 1698 pruned. [I 2021-12-17 18:24:19,373] Trial 1699 pruned. [I 2021-12-17 18:24:20,496] Trial 1700 pruned. [I 2021-12-17 18:24:22,251] Trial 1701 pruned. 
[I 2021-12-17 18:24:22,973] Trial 1702 pruned. [I 2021-12-17 18:24:24,672] Trial 1703 pruned. [I 2021-12-17 18:24:31,334] Trial 1704 pruned. [I 2021-12-17 18:24:37,014] Trial 1705 pruned. [I 2021-12-17 18:24:42,872] Trial 1706 pruned. [I 2021-12-17 18:24:44,719] Trial 1707 pruned. [I 2021-12-17 18:24:45,313] Trial 1708 pruned. [I 2021-12-17 18:24:49,430] Trial 1709 pruned. [I 2021-12-17 18:24:52,963] Trial 1710 pruned. [I 2021-12-17 18:24:55,034] Trial 1711 pruned. [I 2021-12-17 18:24:55,766] Trial 1712 pruned. [I 2021-12-17 18:24:56,638] Trial 1713 pruned. [I 2021-12-17 18:25:01,822] Trial 1714 pruned. [I 2021-12-17 18:25:04,120] Trial 1715 pruned. [I 2021-12-17 18:25:05,790] Trial 1716 pruned. [I 2021-12-17 18:25:11,281] Trial 1717 pruned. [I 2021-12-17 18:25:14,172] Trial 1718 pruned. [I 2021-12-17 18:25:17,605] Trial 1719 pruned. [I 2021-12-17 18:25:19,270] Trial 1720 pruned. [I 2021-12-17 18:25:25,193] Trial 1721 pruned. [I 2021-12-17 18:25:27,358] Trial 1722 pruned. [I 2021-12-17 18:25:30,315] Trial 1723 pruned. [I 2021-12-17 18:25:32,108] Trial 1724 pruned. [I 2021-12-17 18:25:32,946] Trial 1725 pruned. [I 2021-12-17 18:25:39,197] Trial 1726 pruned. [I 2021-12-17 18:25:40,015] Trial 1727 pruned. [I 2021-12-17 18:25:54,001] Trial 1728 pruned. [I 2021-12-17 18:25:55,656] Trial 1729 pruned. [I 2021-12-17 18:25:57,397] Trial 1730 pruned. [I 2021-12-17 18:25:59,211] Trial 1731 pruned. [I 2021-12-17 18:26:00,331] Trial 1732 pruned. [I 2021-12-17 18:26:02,897] Trial 1733 pruned. [I 2021-12-17 18:26:04,513] Trial 1734 pruned. [I 2021-12-17 18:26:07,793] Trial 1735 pruned. [I 2021-12-17 18:26:08,576] Trial 1736 pruned. [I 2021-12-17 18:26:09,980] Trial 1737 pruned. [I 2021-12-17 18:26:11,861] Trial 1738 pruned. 
[I 2021-12-17 18:28:38,320] Trial 1739 finished with value: 192.83303833007812 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 328, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 18:28:39,697] Trial 1740 pruned. [I 2021-12-17 18:28:41,663] Trial 1741 pruned. [I 2021-12-17 18:28:45,001] Trial 1742 pruned. [I 2021-12-17 18:28:47,105] Trial 1743 pruned. [I 2021-12-17 18:28:48,851] Trial 1744 pruned. [I 2021-12-17 18:28:52,003] Trial 1745 pruned. [I 2021-12-17 18:28:54,851] Trial 1746 pruned. [I 2021-12-17 18:28:55,995] Trial 1747 pruned. [I 2021-12-17 18:28:57,656] Trial 1748 pruned. [I 2021-12-17 18:29:01,000] Trial 1749 pruned. [I 2021-12-17 18:29:01,605] Trial 1750 pruned. [I 2021-12-17 18:29:05,111] Trial 1751 pruned. [I 2021-12-17 18:29:06,220] Trial 1752 pruned. [I 2021-12-17 18:29:07,822] Trial 1753 pruned. [I 2021-12-17 18:29:09,850] Trial 1754 pruned. [I 2021-12-17 18:29:12,115] Trial 1755 pruned. [I 2021-12-17 18:29:15,558] Trial 1756 pruned. [I 2021-12-17 18:29:16,124] Trial 1757 pruned. [I 2021-12-17 18:29:19,264] Trial 1758 pruned. [I 2021-12-17 18:29:21,091] Trial 1759 pruned. [I 2021-12-17 18:29:24,719] Trial 1760 pruned. [I 2021-12-17 18:29:25,265] Trial 1761 pruned. [I 2021-12-17 18:34:23,210] Trial 1762 finished with value: 177.8464813232422 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 832, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 18:34:24,098] Trial 1763 pruned. [I 2021-12-17 18:34:25,753] Trial 1764 pruned. 
[I 2021-12-17 18:34:26,670] Trial 1765 pruned. [I 2021-12-17 18:34:28,482] Trial 1766 pruned. [I 2021-12-17 18:34:29,760] Trial 1767 pruned. [I 2021-12-17 18:34:31,688] Trial 1768 pruned. [I 2021-12-17 18:34:35,949] Trial 1769 pruned. [I 2021-12-17 18:34:39,169] Trial 1770 pruned. [I 2021-12-17 18:34:42,293] Trial 1771 pruned. [I 2021-12-17 18:34:44,119] Trial 1772 pruned. [I 2021-12-17 18:34:47,419] Trial 1773 pruned. [I 2021-12-17 18:34:49,582] Trial 1774 pruned. [I 2021-12-17 18:34:50,162] Trial 1775 pruned. [I 2021-12-17 18:34:51,141] Trial 1776 pruned. [I 2021-12-17 18:34:53,048] Trial 1777 pruned. [I 2021-12-17 18:34:54,715] Trial 1778 pruned. [I 2021-12-17 18:35:01,291] Trial 1779 pruned. [I 2021-12-17 18:35:10,989] Trial 1780 pruned. [I 2021-12-17 18:35:12,161] Trial 1781 pruned. [I 2021-12-17 18:35:19,229] Trial 1782 pruned. [I 2021-12-17 18:35:20,295] Trial 1783 pruned. [I 2021-12-17 18:35:20,690] Trial 1784 pruned. [I 2021-12-17 18:35:22,223] Trial 1785 pruned. [I 2021-12-17 18:35:23,894] Trial 1786 pruned. [I 2021-12-17 18:35:25,432] Trial 1787 pruned. [I 2021-12-17 18:35:31,700] Trial 1788 pruned. [I 2021-12-17 18:35:33,029] Trial 1789 pruned. [I 2021-12-17 18:35:34,882] Trial 1790 pruned. [I 2021-12-17 18:35:36,491] Trial 1791 pruned. [I 2021-12-17 18:35:39,964] Trial 1792 pruned. [I 2021-12-17 18:35:43,184] Trial 1793 pruned. [I 2021-12-17 18:35:44,987] Trial 1794 pruned. [I 2021-12-17 18:35:46,529] Trial 1795 pruned. [I 2021-12-17 18:35:56,738] Trial 1796 pruned. [I 2021-12-17 18:36:00,086] Trial 1797 pruned. [I 2021-12-17 18:36:01,776] Trial 1798 pruned. [I 2021-12-17 18:36:02,907] Trial 1799 pruned. [I 2021-12-17 18:36:04,546] Trial 1800 pruned. [I 2021-12-17 18:36:08,328] Trial 1801 pruned. [I 2021-12-17 18:36:09,433] Trial 1802 pruned. [I 2021-12-17 18:36:13,163] Trial 1803 pruned. [I 2021-12-17 18:36:17,993] Trial 1804 pruned. [I 2021-12-17 18:36:18,694] Trial 1805 pruned. [I 2021-12-17 18:36:21,837] Trial 1806 pruned. 
[I 2021-12-17 18:36:25,255] Trial 1807 pruned. [I 2021-12-17 18:36:29,852] Trial 1808 pruned. [I 2021-12-17 18:36:30,370] Trial 1809 pruned. [I 2021-12-17 18:36:33,665] Trial 1810 pruned. [I 2021-12-17 18:36:35,147] Trial 1811 pruned. [I 2021-12-17 18:36:36,186] Trial 1812 pruned. [I 2021-12-17 18:36:38,100] Trial 1813 pruned. [I 2021-12-17 18:36:44,148] Trial 1814 pruned. [I 2021-12-17 18:36:47,250] Trial 1815 pruned. [I 2021-12-17 18:36:49,134] Trial 1816 pruned. [I 2021-12-17 18:36:54,545] Trial 1817 pruned. [I 2021-12-17 18:36:56,233] Trial 1818 pruned. [I 2021-12-17 18:36:59,464] Trial 1819 pruned. [I 2021-12-17 18:37:01,261] Trial 1820 pruned. [I 2021-12-17 18:37:03,293] Trial 1821 pruned. [I 2021-12-17 18:37:06,386] Trial 1822 pruned. [I 2021-12-17 18:37:07,209] Trial 1823 pruned. [I 2021-12-17 18:37:13,554] Trial 1824 pruned. [I 2021-12-17 18:37:14,527] Trial 1825 pruned. [I 2021-12-17 18:37:20,167] Trial 1826 pruned. [I 2021-12-17 18:37:22,513] Trial 1827 pruned. [I 2021-12-17 18:37:27,076] Trial 1828 pruned. [I 2021-12-17 18:37:28,197] Trial 1829 pruned. [I 2021-12-17 18:37:35,569] Trial 1830 pruned. [I 2021-12-17 18:37:41,564] Trial 1831 pruned. [I 2021-12-17 18:37:42,858] Trial 1832 pruned. [I 2021-12-17 18:37:46,065] Trial 1833 pruned. [I 2021-12-17 18:37:49,415] Trial 1834 pruned. [I 2021-12-17 18:37:49,899] Trial 1835 pruned. [I 2021-12-17 18:37:53,581] Trial 1836 pruned. [I 2021-12-17 18:37:54,443] Trial 1837 pruned. [I 2021-12-17 18:38:01,888] Trial 1838 pruned. [I 2021-12-17 18:38:02,900] Trial 1839 pruned. [I 2021-12-17 18:38:04,090] Trial 1840 pruned. [I 2021-12-17 18:38:06,299] Trial 1841 pruned. [I 2021-12-17 18:38:08,456] Trial 1842 pruned. [I 2021-12-17 18:38:11,583] Trial 1843 pruned. [I 2021-12-17 18:38:14,605] Trial 1844 pruned. [I 2021-12-17 18:38:17,457] Trial 1845 pruned. [I 2021-12-17 18:38:18,173] Trial 1846 pruned. [I 2021-12-17 18:38:22,212] Trial 1847 pruned. 
[I 2021-12-17 18:41:14,723] Trial 1848 finished with value: 178.69178771972656 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 422, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 18:41:16,519] Trial 1849 pruned. [I 2021-12-17 18:41:17,537] Trial 1850 pruned. [I 2021-12-17 18:41:20,811] Trial 1851 pruned. [I 2021-12-17 18:41:23,977] Trial 1852 pruned. [I 2021-12-17 18:41:24,746] Trial 1853 pruned. [I 2021-12-17 18:41:28,055] Trial 1854 pruned. [I 2021-12-17 18:41:29,967] Trial 1855 pruned. [I 2021-12-17 18:41:31,876] Trial 1856 pruned. [I 2021-12-17 18:41:32,566] Trial 1857 pruned. [I 2021-12-17 18:41:34,188] Trial 1858 pruned. [I 2021-12-17 18:41:35,972] Trial 1859 pruned. [I 2021-12-17 18:41:39,595] Trial 1860 pruned. [I 2021-12-17 18:41:44,380] Trial 1861 pruned. [I 2021-12-17 18:41:45,622] Trial 1862 pruned. [I 2021-12-17 18:41:47,190] Trial 1863 pruned. [I 2021-12-17 18:41:48,774] Trial 1864 pruned. [I 2021-12-17 18:41:50,633] Trial 1865 pruned. [I 2021-12-17 18:41:53,767] Trial 1866 pruned. [I 2021-12-17 18:41:55,733] Trial 1867 pruned. [I 2021-12-17 18:41:57,050] Trial 1868 pruned. [I 2021-12-17 18:42:04,036] Trial 1869 pruned. [I 2021-12-17 18:42:09,689] Trial 1870 pruned. [I 2021-12-17 18:42:12,611] Trial 1871 pruned. [I 2021-12-17 18:42:13,803] Trial 1872 pruned. [I 2021-12-17 18:42:19,245] Trial 1873 pruned. [I 2021-12-17 18:42:23,233] Trial 1874 pruned. [I 2021-12-17 18:42:26,265] Trial 1875 pruned. [I 2021-12-17 18:42:28,027] Trial 1876 pruned. [I 2021-12-17 18:42:28,729] Trial 1877 pruned. [I 2021-12-17 18:42:32,368] Trial 1878 pruned. [I 2021-12-17 18:42:35,444] Trial 1879 pruned. [I 2021-12-17 18:42:40,345] Trial 1880 pruned. [I 2021-12-17 18:42:40,811] Trial 1881 pruned. 
[I 2021-12-17 18:42:45,985] Trial 1882 pruned. [I 2021-12-17 18:42:51,784] Trial 1883 pruned. [I 2021-12-17 18:45:06,961] Trial 1884 finished with value: 163.8708953857422 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 280, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 18:45:08,384] Trial 1885 pruned. [I 2021-12-17 18:45:10,095] Trial 1886 pruned. [I 2021-12-17 18:45:10,944] Trial 1887 pruned. [I 2021-12-17 18:45:12,383] Trial 1888 pruned. [I 2021-12-17 18:45:15,145] Trial 1889 pruned. [I 2021-12-17 18:45:16,495] Trial 1890 pruned. [I 2021-12-17 18:45:18,515] Trial 1891 pruned. [I 2021-12-17 18:45:19,983] Trial 1892 pruned. [I 2021-12-17 18:45:22,289] Trial 1893 pruned. [I 2021-12-17 18:45:25,123] Trial 1894 pruned. [I 2021-12-17 18:45:26,005] Trial 1895 pruned. [I 2021-12-17 18:45:27,458] Trial 1896 pruned. [I 2021-12-17 18:45:28,849] Trial 1897 pruned. [I 2021-12-17 18:45:31,587] Trial 1898 pruned. [I 2021-12-17 18:45:33,098] Trial 1899 pruned. [I 2021-12-17 18:45:33,985] Trial 1900 pruned. [I 2021-12-17 18:45:34,597] Trial 1901 pruned. [I 2021-12-17 18:45:37,667] Trial 1902 pruned. [I 2021-12-17 18:45:38,931] Trial 1903 pruned. [I 2021-12-17 18:45:40,388] Trial 1904 pruned. [I 2021-12-17 18:45:40,904] Trial 1905 pruned. [I 2021-12-17 18:45:43,750] Trial 1906 pruned. [I 2021-12-17 18:45:46,744] Trial 1907 pruned. [I 2021-12-17 18:45:48,114] Trial 1908 pruned. [I 2021-12-17 18:45:49,459] Trial 1909 pruned. [I 2021-12-17 18:45:50,929] Trial 1910 pruned. [I 2021-12-17 18:45:52,501] Trial 1911 pruned. [I 2021-12-17 18:45:53,459] Trial 1912 pruned. [I 2021-12-17 18:45:55,045] Trial 1913 pruned. [I 2021-12-17 18:45:56,678] Trial 1914 pruned. [I 2021-12-17 18:45:57,880] Trial 1915 pruned. 
[I 2021-12-17 18:46:00,926] Trial 1916 pruned. [I 2021-12-17 18:46:02,469] Trial 1917 pruned. [I 2021-12-17 18:46:03,426] Trial 1918 pruned. [I 2021-12-17 18:46:06,426] Trial 1919 pruned. [I 2021-12-17 18:46:07,732] Trial 1920 pruned. [I 2021-12-17 18:46:10,298] Trial 1921 pruned. [I 2021-12-17 18:46:13,488] Trial 1922 pruned. [I 2021-12-17 18:46:15,081] Trial 1923 pruned. [I 2021-12-17 18:46:15,946] Trial 1924 pruned. [I 2021-12-17 18:46:16,970] Trial 1925 pruned. [I 2021-12-17 18:46:18,607] Trial 1926 pruned. [I 2021-12-17 18:46:20,246] Trial 1927 pruned. [I 2021-12-17 18:46:21,455] Trial 1928 pruned. [I 2021-12-17 18:46:22,171] Trial 1929 pruned. [I 2021-12-17 18:46:23,976] Trial 1930 pruned. [I 2021-12-17 18:46:26,617] Trial 1931 pruned. [I 2021-12-17 18:46:29,600] Trial 1932 pruned. [I 2021-12-17 18:46:31,953] Trial 1933 pruned. [I 2021-12-17 18:46:34,965] Trial 1934 pruned. [I 2021-12-17 18:46:38,256] Trial 1935 pruned. [I 2021-12-17 18:46:41,147] Trial 1936 pruned. [I 2021-12-17 18:46:42,101] Trial 1937 pruned. [I 2021-12-17 18:46:45,128] Trial 1938 pruned. [I 2021-12-17 18:46:48,517] Trial 1939 pruned. [I 2021-12-17 18:46:50,197] Trial 1940 pruned. [I 2021-12-17 18:46:51,636] Trial 1941 pruned. [I 2021-12-17 18:46:52,397] Trial 1942 pruned. [I 2021-12-17 18:46:54,749] Trial 1943 pruned. [I 2021-12-17 18:46:56,369] Trial 1944 pruned. [I 2021-12-17 18:46:57,958] Trial 1945 pruned. [I 2021-12-17 18:46:59,513] Trial 1946 pruned. [I 2021-12-17 18:47:01,282] Trial 1947 pruned. [I 2021-12-17 18:47:04,717] Trial 1948 pruned. [I 2021-12-17 18:47:05,140] Trial 1949 pruned. [I 2021-12-17 18:47:09,772] Trial 1950 pruned. [I 2021-12-17 18:47:11,787] Trial 1951 pruned. [I 2021-12-17 18:47:14,456] Trial 1952 pruned. [I 2021-12-17 18:47:17,332] Trial 1953 pruned. [I 2021-12-17 18:47:17,808] Trial 1954 pruned. [I 2021-12-17 18:47:18,975] Trial 1955 pruned. [I 2021-12-17 18:47:22,645] Trial 1956 pruned. [I 2021-12-17 18:47:32,804] Trial 1957 pruned. 
[I 2021-12-17 18:47:34,299] Trial 1958 pruned. [I 2021-12-17 18:47:37,942] Trial 1959 pruned. [I 2021-12-17 18:47:39,556] Trial 1960 pruned. [I 2021-12-17 18:47:42,859] Trial 1961 pruned. [I 2021-12-17 18:47:47,142] Trial 1962 pruned. [I 2021-12-17 18:47:48,106] Trial 1963 pruned. [I 2021-12-17 18:47:52,074] Trial 1964 pruned. [I 2021-12-17 18:47:53,205] Trial 1965 pruned. [I 2021-12-17 18:47:56,183] Trial 1966 pruned. [I 2021-12-17 18:47:57,172] Trial 1967 pruned. [I 2021-12-17 18:48:08,481] Trial 1968 pruned. [I 2021-12-17 18:48:10,945] Trial 1969 pruned. [I 2021-12-17 18:48:12,644] Trial 1970 pruned. [I 2021-12-17 18:48:18,132] Trial 1971 pruned. [I 2021-12-17 18:48:20,083] Trial 1972 pruned. [I 2021-12-17 18:48:22,068] Trial 1973 pruned. [I 2021-12-17 18:48:22,476] Trial 1974 pruned. [I 2021-12-17 18:48:25,902] Trial 1975 pruned. [I 2021-12-17 18:48:28,944] Trial 1976 pruned. [I 2021-12-17 18:48:29,997] Trial 1977 pruned. [I 2021-12-17 18:48:30,466] Trial 1978 pruned. [I 2021-12-17 18:48:33,939] Trial 1979 pruned. [I 2021-12-17 18:48:50,522] Trial 1980 pruned. [I 2021-12-17 18:48:55,309] Trial 1981 pruned. [I 2021-12-17 18:48:56,733] Trial 1982 pruned. [I 2021-12-17 18:48:58,288] Trial 1983 pruned. [I 2021-12-17 18:49:00,162] Trial 1984 pruned. [I 2021-12-17 18:49:03,476] Trial 1985 pruned. [I 2021-12-17 18:49:04,749] Trial 1986 pruned. [I 2021-12-17 18:49:05,747] Trial 1987 pruned. [I 2021-12-17 18:49:07,295] Trial 1988 pruned. [I 2021-12-17 18:49:10,050] Trial 1989 pruned. [I 2021-12-17 18:49:13,350] Trial 1990 pruned. [I 2021-12-17 18:49:14,898] Trial 1991 pruned. [I 2021-12-17 18:49:15,661] Trial 1992 pruned. [I 2021-12-17 18:49:22,741] Trial 1993 pruned. [I 2021-12-17 18:49:24,268] Trial 1994 pruned. [I 2021-12-17 18:49:26,571] Trial 1995 pruned. [I 2021-12-17 18:49:28,450] Trial 1996 pruned. [I 2021-12-17 18:49:30,468] Trial 1997 pruned. [I 2021-12-17 18:49:33,557] Trial 1998 pruned. [I 2021-12-17 18:49:34,673] Trial 1999 pruned. 
[I 2021-12-17 18:49:35,636] Trial 2000 pruned. [I 2021-12-17 18:49:36,116] Trial 2001 pruned. [I 2021-12-17 18:49:37,999] Trial 2002 pruned. [I 2021-12-17 18:49:41,314] Trial 2003 pruned. [I 2021-12-17 18:49:43,003] Trial 2004 pruned. [I 2021-12-17 18:49:44,517] Trial 2005 pruned. [I 2021-12-17 18:49:46,151] Trial 2006 pruned. [I 2021-12-17 18:49:47,267] Trial 2007 pruned. [I 2021-12-17 18:49:50,749] Trial 2008 pruned. [I 2021-12-17 18:49:52,477] Trial 2009 pruned. [I 2021-12-17 18:49:54,241] Trial 2010 pruned. [I 2021-12-17 18:49:55,114] Trial 2011 pruned. [I 2021-12-17 18:49:58,855] Trial 2012 pruned. [I 2021-12-17 18:50:02,661] Trial 2013 pruned. [I 2021-12-17 18:50:06,073] Trial 2014 pruned. [I 2021-12-17 18:50:06,996] Trial 2015 pruned. [I 2021-12-17 18:50:10,185] Trial 2016 pruned. [I 2021-12-17 18:50:11,983] Trial 2017 pruned. [I 2021-12-17 18:50:13,868] Trial 2018 pruned. [I 2021-12-17 18:50:15,576] Trial 2019 pruned. [I 2021-12-17 18:50:17,160] Trial 2020 pruned. [I 2021-12-17 18:50:19,725] Trial 2021 pruned. [I 2021-12-17 18:50:24,010] Trial 2022 pruned. [I 2021-12-17 18:50:24,626] Trial 2023 pruned. [I 2021-12-17 18:50:25,859] Trial 2024 pruned. [I 2021-12-17 18:50:29,354] Trial 2025 pruned. [I 2021-12-17 18:50:32,647] Trial 2026 pruned. [I 2021-12-17 18:50:33,108] Trial 2027 pruned. [I 2021-12-17 18:50:37,837] Trial 2028 pruned. [I 2021-12-17 18:50:39,086] Trial 2029 pruned. [I 2021-12-17 18:50:45,824] Trial 2030 pruned. [I 2021-12-17 18:50:47,182] Trial 2031 pruned. [I 2021-12-17 18:50:49,030] Trial 2032 pruned. [I 2021-12-17 18:50:54,615] Trial 2033 pruned. [I 2021-12-17 18:50:56,162] Trial 2034 pruned. [I 2021-12-17 18:50:59,525] Trial 2035 pruned. [I 2021-12-17 18:51:00,427] Trial 2036 pruned. [I 2021-12-17 18:51:01,466] Trial 2037 pruned. [I 2021-12-17 18:51:03,247] Trial 2038 pruned. [I 2021-12-17 18:51:04,398] Trial 2039 pruned. [I 2021-12-17 18:51:06,438] Trial 2040 pruned. [I 2021-12-17 18:51:08,155] Trial 2041 pruned. 
[I 2021-12-17 18:51:09,766] Trial 2042 pruned. [I 2021-12-17 18:51:13,203] Trial 2043 pruned. [I 2021-12-17 18:51:15,056] Trial 2044 pruned. [I 2021-12-17 18:51:17,308] Trial 2045 pruned. [I 2021-12-17 18:51:17,869] Trial 2046 pruned. [I 2021-12-17 18:51:19,674] Trial 2047 pruned. [I 2021-12-17 18:51:21,804] Trial 2048 pruned. [I 2021-12-17 18:51:23,281] Trial 2049 pruned. [I 2021-12-17 18:51:23,778] Trial 2050 pruned. [I 2021-12-17 18:51:25,380] Trial 2051 pruned. [I 2021-12-17 18:51:27,367] Trial 2052 pruned. [I 2021-12-17 18:51:31,934] Trial 2053 pruned. [I 2021-12-17 18:51:33,626] Trial 2054 pruned. [I 2021-12-17 18:57:29,656] Trial 2055 finished with value: 165.3345489501953 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 982, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 18:57:32,303] Trial 2056 pruned. [I 2021-12-17 18:57:43,317] Trial 2057 pruned. [I 2021-12-17 18:57:48,440] Trial 2058 pruned. [I 2021-12-17 18:57:50,129] Trial 2059 pruned. [I 2021-12-17 18:57:53,631] Trial 2060 pruned. [I 2021-12-17 18:57:56,818] Trial 2061 pruned. [I 2021-12-17 18:57:57,710] Trial 2062 pruned. [I 2021-12-17 18:58:01,377] Trial 2063 pruned. [I 2021-12-17 18:58:04,132] Trial 2064 pruned. [I 2021-12-17 18:58:06,106] Trial 2065 pruned. [I 2021-12-17 18:58:08,660] Trial 2066 pruned. [I 2021-12-17 18:58:12,325] Trial 2067 pruned. [I 2021-12-17 18:58:20,249] Trial 2068 pruned. [I 2021-12-17 18:58:21,723] Trial 2069 pruned. [I 2021-12-17 18:58:25,321] Trial 2070 pruned. [I 2021-12-17 18:58:26,267] Trial 2071 pruned. [I 2021-12-17 18:58:28,750] Trial 2072 pruned. [I 2021-12-17 18:58:34,395] Trial 2073 pruned. [I 2021-12-17 18:58:35,203] Trial 2074 pruned. [I 2021-12-17 18:58:36,674] Trial 2075 pruned. 
[I 2021-12-17 18:58:45,268] Trial 2076 pruned. [I 2021-12-17 18:59:01,210] Trial 2077 pruned. [I 2021-12-17 19:04:54,855] Trial 2078 finished with value: 163.30410766601562 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 976, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 19:04:57,698] Trial 2079 pruned. [I 2021-12-17 19:05:01,257] Trial 2080 pruned. [I 2021-12-17 19:05:04,120] Trial 2081 pruned. [I 2021-12-17 19:05:10,685] Trial 2082 pruned. [I 2021-12-17 19:05:14,521] Trial 2083 pruned. [I 2021-12-17 19:05:16,590] Trial 2084 pruned. [I 2021-12-17 19:05:18,171] Trial 2085 pruned. [I 2021-12-17 19:05:23,005] Trial 2086 pruned. [I 2021-12-17 19:05:26,435] Trial 2087 pruned. [I 2021-12-17 19:05:26,971] Trial 2088 pruned. [I 2021-12-17 19:05:33,516] Trial 2089 pruned. [I 2021-12-17 19:05:39,364] Trial 2090 pruned. [I 2021-12-17 19:05:46,882] Trial 2091 pruned. [I 2021-12-17 19:05:48,576] Trial 2092 pruned. [I 2021-12-17 19:05:50,207] Trial 2093 pruned. [I 2021-12-17 19:05:53,761] Trial 2094 pruned. [I 2021-12-17 19:05:54,833] Trial 2095 pruned. [I 2021-12-17 19:05:57,525] Trial 2096 pruned. [I 2021-12-17 19:06:04,679] Trial 2097 pruned. [I 2021-12-17 19:06:12,431] Trial 2098 pruned. [I 2021-12-17 19:06:13,946] Trial 2099 pruned. [I 2021-12-17 19:06:15,348] Trial 2100 pruned. [I 2021-12-17 19:06:22,580] Trial 2101 pruned. [I 2021-12-17 19:06:25,447] Trial 2102 pruned. [I 2021-12-17 19:06:27,180] Trial 2103 pruned. [I 2021-12-17 19:06:37,133] Trial 2104 pruned. [I 2021-12-17 19:06:38,721] Trial 2105 pruned. [I 2021-12-17 19:06:42,061] Trial 2106 pruned. [I 2021-12-17 19:06:45,055] Trial 2107 pruned. [I 2021-12-17 19:06:47,295] Trial 2108 pruned. [I 2021-12-17 19:06:53,586] Trial 2109 pruned. 
[I 2021-12-17 19:06:54,922] Trial 2110 pruned. [I 2021-12-17 19:07:02,179] Trial 2111 pruned. [I 2021-12-17 19:07:08,322] Trial 2112 pruned. [I 2021-12-17 19:07:09,308] Trial 2113 pruned. [I 2021-12-17 19:07:11,080] Trial 2114 pruned. [I 2021-12-17 19:07:12,657] Trial 2115 pruned. [I 2021-12-17 19:07:13,814] Trial 2116 pruned. [I 2021-12-17 19:07:15,628] Trial 2117 pruned. [I 2021-12-17 19:07:20,060] Trial 2118 pruned. [I 2021-12-17 19:07:20,732] Trial 2119 pruned. [I 2021-12-17 19:07:23,935] Trial 2120 pruned. [I 2021-12-17 19:07:31,559] Trial 2121 pruned. [I 2021-12-17 19:07:34,425] Trial 2122 pruned. [I 2021-12-17 19:07:35,647] Trial 2123 pruned. [I 2021-12-17 19:07:38,048] Trial 2124 pruned. [I 2021-12-17 19:07:38,558] Trial 2125 pruned. [I 2021-12-17 19:07:43,301] Trial 2126 pruned. [I 2021-12-17 19:07:49,535] Trial 2127 pruned. [I 2021-12-17 19:07:52,815] Trial 2128 pruned. [I 2021-12-17 19:07:54,479] Trial 2129 pruned. [I 2021-12-17 19:07:55,794] Trial 2130 pruned. [I 2021-12-17 19:07:58,850] Trial 2131 pruned. [I 2021-12-17 19:08:00,549] Trial 2132 pruned. [I 2021-12-17 19:08:03,539] Trial 2133 pruned. [I 2021-12-17 19:08:05,527] Trial 2134 pruned. [I 2021-12-17 19:08:08,253] Trial 2135 pruned. [I 2021-12-17 19:08:09,287] Trial 2136 pruned. [I 2021-12-17 19:08:12,164] Trial 2137 pruned. [I 2021-12-17 19:08:13,859] Trial 2138 pruned. [I 2021-12-17 19:08:15,382] Trial 2139 pruned. [I 2021-12-17 19:08:17,140] Trial 2140 pruned. [I 2021-12-17 19:08:20,157] Trial 2141 pruned. [I 2021-12-17 19:08:21,254] Trial 2142 pruned. [I 2021-12-17 19:08:21,908] Trial 2143 pruned. [I 2021-12-17 19:08:23,532] Trial 2144 pruned. [I 2021-12-17 19:08:26,801] Trial 2145 pruned. [I 2021-12-17 19:08:28,309] Trial 2146 pruned. [I 2021-12-17 19:08:28,830] Trial 2147 pruned. [I 2021-12-17 19:08:30,703] Trial 2148 pruned. [I 2021-12-17 19:08:31,658] Trial 2149 pruned. [I 2021-12-17 19:08:32,836] Trial 2150 pruned. [I 2021-12-17 19:08:36,427] Trial 2151 pruned. 
[I 2021-12-17 19:08:37,733] Trial 2152 pruned. [I 2021-12-17 19:08:40,536] Trial 2153 pruned. [I 2021-12-17 19:08:42,171] Trial 2154 pruned. [I 2021-12-17 19:08:45,985] Trial 2155 pruned. [I 2021-12-17 19:08:48,893] Trial 2156 pruned. [I 2021-12-17 19:08:50,558] Trial 2157 pruned. [I 2021-12-17 19:08:52,528] Trial 2158 pruned. [I 2021-12-17 19:08:55,778] Trial 2159 pruned. [I 2021-12-17 19:08:57,891] Trial 2160 pruned. [I 2021-12-17 19:08:59,829] Trial 2161 pruned. [I 2021-12-17 19:09:00,433] Trial 2162 pruned. [I 2021-12-17 19:09:01,720] Trial 2163 pruned. [I 2021-12-17 19:09:05,204] Trial 2164 pruned. [I 2021-12-17 19:09:08,643] Trial 2165 pruned. [I 2021-12-17 19:09:11,236] Trial 2166 pruned. [I 2021-12-17 19:09:14,776] Trial 2167 pruned. [I 2021-12-17 19:09:15,832] Trial 2168 pruned. [I 2021-12-17 19:09:17,457] Trial 2169 pruned. [I 2021-12-17 19:09:22,218] Trial 2170 pruned. [I 2021-12-17 19:09:24,254] Trial 2171 pruned. [I 2021-12-17 19:09:27,799] Trial 2172 pruned. [I 2021-12-17 19:09:30,834] Trial 2173 pruned. [I 2021-12-17 19:09:31,185] Trial 2174 pruned. [I 2021-12-17 19:09:32,570] Trial 2175 pruned. [I 2021-12-17 19:09:34,314] Trial 2176 pruned. [I 2021-12-17 19:09:35,751] Trial 2177 pruned. [I 2021-12-17 19:09:38,627] Trial 2178 pruned. [I 2021-12-17 19:09:41,808] Trial 2179 pruned. [I 2021-12-17 19:09:43,657] Trial 2180 pruned. [I 2021-12-17 19:09:45,151] Trial 2181 pruned. [I 2021-12-17 19:09:48,177] Trial 2182 pruned. [I 2021-12-17 19:09:50,168] Trial 2183 pruned. [I 2021-12-17 19:09:51,161] Trial 2184 pruned. [I 2021-12-17 19:09:57,867] Trial 2185 pruned. [I 2021-12-17 19:09:59,557] Trial 2186 pruned. [I 2021-12-17 19:10:01,132] Trial 2187 pruned. [I 2021-12-17 19:10:02,844] Trial 2188 pruned. [I 2021-12-17 19:10:04,521] Trial 2189 pruned. [I 2021-12-17 19:10:06,446] Trial 2190 pruned. [I 2021-12-17 19:10:07,564] Trial 2191 pruned. [I 2021-12-17 19:10:08,851] Trial 2192 pruned. [I 2021-12-17 19:10:10,535] Trial 2193 pruned. 
[I 2021-12-17 19:10:12,758] Trial 2194 pruned. [I 2021-12-17 19:10:14,301] Trial 2195 pruned. [I 2021-12-17 19:10:14,865] Trial 2196 pruned. [I 2021-12-17 19:10:16,496] Trial 2197 pruned. [I 2021-12-17 19:10:21,101] Trial 2198 pruned. [I 2021-12-17 19:10:22,106] Trial 2199 pruned. [I 2021-12-17 19:10:23,796] Trial 2200 pruned. [I 2021-12-17 19:10:27,960] Trial 2201 pruned. [I 2021-12-17 19:10:29,649] Trial 2202 pruned. [I 2021-12-17 19:10:32,708] Trial 2203 pruned. [I 2021-12-17 19:10:33,793] Trial 2204 pruned. [I 2021-12-17 19:10:37,059] Trial 2205 pruned. [I 2021-12-17 19:10:39,976] Trial 2206 pruned. [I 2021-12-17 19:10:41,860] Trial 2207 pruned. [I 2021-12-17 19:10:42,916] Trial 2208 pruned. [I 2021-12-17 19:10:52,281] Trial 2209 pruned. [I 2021-12-17 19:10:53,947] Trial 2210 pruned. [I 2021-12-17 19:10:55,000] Trial 2211 pruned. [I 2021-12-17 19:10:58,031] Trial 2212 pruned. [I 2021-12-17 19:10:59,854] Trial 2213 pruned. [I 2021-12-17 19:11:01,187] Trial 2214 pruned. [I 2021-12-17 19:11:04,456] Trial 2215 pruned. [I 2021-12-17 19:11:05,073] Trial 2216 pruned. [I 2021-12-17 19:11:08,780] Trial 2217 pruned. [I 2021-12-17 19:11:10,092] Trial 2218 pruned. [I 2021-12-17 19:11:10,809] Trial 2219 pruned. [I 2021-12-17 19:11:12,656] Trial 2220 pruned. [I 2021-12-17 19:11:14,304] Trial 2221 pruned. [I 2021-12-17 19:11:16,080] Trial 2222 pruned. [I 2021-12-17 19:13:38,070] Trial 2223 finished with value: 166.26084899902344 and parameters: {'batch_size': 16, 'n_hdn_layers': 5, 'neurons_HL1': 298, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'relu'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 19:13:39,487] Trial 2224 pruned. [I 2021-12-17 19:13:42,958] Trial 2225 pruned. [I 2021-12-17 19:13:44,848] Trial 2226 pruned. [I 2021-12-17 19:13:48,210] Trial 2227 pruned. 
[I 2021-12-17 19:13:52,379] Trial 2228 pruned. [I 2021-12-17 19:13:53,798] Trial 2229 pruned. [I 2021-12-17 19:16:57,540] Trial 2230 finished with value: 166.54754638671875 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 470, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 19:16:59,201] Trial 2231 pruned. [I 2021-12-17 19:17:00,815] Trial 2232 pruned. [I 2021-12-17 19:17:04,379] Trial 2233 pruned. [I 2021-12-17 19:17:05,638] Trial 2234 pruned. [I 2021-12-17 19:17:07,507] Trial 2235 pruned. [I 2021-12-17 19:17:08,522] Trial 2236 pruned. [I 2021-12-17 19:17:10,212] Trial 2237 pruned. [I 2021-12-17 19:17:13,819] Trial 2238 pruned. [I 2021-12-17 19:17:14,515] Trial 2239 pruned. [I 2021-12-17 19:17:17,984] Trial 2240 pruned. [I 2021-12-17 19:17:24,784] Trial 2241 pruned. [I 2021-12-17 19:17:28,875] Trial 2242 pruned. [I 2021-12-17 19:17:29,349] Trial 2243 pruned. [I 2021-12-17 19:17:31,037] Trial 2244 pruned. [I 2021-12-17 19:17:32,618] Trial 2245 pruned. [I 2021-12-17 19:17:35,654] Trial 2246 pruned. [I 2021-12-17 19:17:39,530] Trial 2247 pruned. [I 2021-12-17 19:17:41,332] Trial 2248 pruned. [I 2021-12-17 19:17:42,569] Trial 2249 pruned. [I 2021-12-17 19:17:45,977] Trial 2250 pruned. [I 2021-12-17 19:17:53,178] Trial 2251 pruned. [I 2021-12-17 19:17:54,970] Trial 2252 pruned. [I 2021-12-17 19:17:56,922] Trial 2253 pruned. [I 2021-12-17 19:17:58,669] Trial 2254 pruned. [I 2021-12-17 19:18:01,492] Trial 2255 pruned. [I 2021-12-17 19:18:08,809] Trial 2256 pruned. 
[I 2021-12-17 19:20:57,016] Trial 2257 finished with value: 165.5403289794922 and parameters: {'batch_size': 16, 'n_hdn_layers': 2, 'neurons_HL1': 456, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 19:20:58,749] Trial 2258 pruned. [I 2021-12-17 19:21:02,258] Trial 2259 pruned. [I 2021-12-17 19:21:06,153] Trial 2260 pruned. [I 2021-12-17 19:21:08,492] Trial 2261 pruned. [I 2021-12-17 19:21:12,136] Trial 2262 pruned. [I 2021-12-17 19:21:12,837] Trial 2263 pruned. [I 2021-12-17 19:21:14,594] Trial 2264 pruned. [I 2021-12-17 19:21:20,369] Trial 2265 pruned. [I 2021-12-17 19:21:22,419] Trial 2266 pruned. [I 2021-12-17 19:21:22,984] Trial 2267 pruned. [I 2021-12-17 19:21:26,231] Trial 2268 pruned. [I 2021-12-17 19:21:28,344] Trial 2269 pruned. [I 2021-12-17 19:21:29,984] Trial 2270 pruned. [I 2021-12-17 19:21:31,827] Trial 2271 pruned. [I 2021-12-17 19:24:07,176] Trial 2272 finished with value: 168.25482177734375 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 366, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 482 with value: 159.2676239013672. [I 2021-12-17 19:24:08,691] Trial 2273 pruned. [I 2021-12-17 19:24:10,342] Trial 2274 pruned. [I 2021-12-17 19:24:13,345] Trial 2275 pruned. [I 2021-12-17 19:24:15,087] Trial 2276 pruned. [I 2021-12-17 19:24:16,108] Trial 2277 pruned. [I 2021-12-17 19:24:19,433] Trial 2278 pruned. [I 2021-12-17 19:24:21,071] Trial 2279 pruned. [I 2021-12-17 19:24:22,453] Trial 2280 pruned. [I 2021-12-17 19:24:34,313] Trial 2281 pruned. [I 2021-12-17 19:24:36,396] Trial 2282 pruned. [I 2021-12-17 19:24:39,853] Trial 2283 pruned. 
[I 2021-12-17 19:24:41,569] Trial 2284 pruned. [I 2021-12-17 19:24:42,774] Trial 2285 pruned. [I 2021-12-17 19:24:46,467] Trial 2286 pruned. [I 2021-12-17 19:24:48,202] Trial 2287 pruned. [I 2021-12-17 19:24:50,069] Trial 2288 pruned. [I 2021-12-17 19:24:51,477] Trial 2289 pruned. [I 2021-12-17 19:24:52,083] Trial 2290 pruned. [I 2021-12-17 19:24:52,666] Trial 2291 pruned. [I 2021-12-17 19:24:55,997] Trial 2292 pruned. [I 2021-12-17 19:24:58,495] Trial 2293 pruned. [I 2021-12-17 19:25:02,259] Trial 2294 pruned. [I 2021-12-17 19:25:07,309] Trial 2295 pruned. [I 2021-12-17 19:25:14,531] Trial 2296 pruned. [I 2021-12-17 19:25:16,197] Trial 2297 pruned. [I 2021-12-17 19:25:17,613] Trial 2298 pruned. [I 2021-12-17 19:25:23,025] Trial 2299 pruned. [I 2021-12-17 19:25:27,441] Trial 2300 pruned. [I 2021-12-17 19:25:29,480] Trial 2301 pruned. [I 2021-12-17 19:25:31,675] Trial 2302 pruned. [I 2021-12-17 19:25:33,369] Trial 2303 pruned. [I 2021-12-17 19:25:35,265] Trial 2304 pruned. [I 2021-12-17 19:25:36,517] Trial 2305 pruned. [I 2021-12-17 19:25:38,065] Trial 2306 pruned. [I 2021-12-17 19:25:42,157] Trial 2307 pruned. [I 2021-12-17 19:25:44,037] Trial 2308 pruned. [I 2021-12-17 19:25:45,729] Trial 2309 pruned. [I 2021-12-17 19:25:47,555] Trial 2310 pruned. [I 2021-12-17 19:25:49,051] Trial 2311 pruned. [I 2021-12-17 19:25:49,710] Trial 2312 pruned. [I 2021-12-17 19:25:53,532] Trial 2313 pruned. [I 2021-12-17 19:25:57,565] Trial 2314 pruned. [I 2021-12-17 19:25:58,326] Trial 2315 pruned. [I 2021-12-17 19:26:01,678] Trial 2316 pruned. [I 2021-12-17 19:26:03,450] Trial 2317 pruned. [I 2021-12-17 19:26:05,397] Trial 2318 pruned. [I 2021-12-17 19:26:07,746] Trial 2319 pruned. [I 2021-12-17 19:26:10,781] Trial 2320 pruned. [I 2021-12-17 19:26:12,637] Trial 2321 pruned. [I 2021-12-17 19:26:14,245] Trial 2322 pruned. [I 2021-12-17 19:26:15,395] Trial 2323 pruned. [I 2021-12-17 19:26:19,776] Trial 2324 pruned. [I 2021-12-17 19:26:22,268] Trial 2325 pruned. 
[I 2021-12-17 19:26:24,055] Trial 2326 pruned. [I 2021-12-17 19:26:25,627] Trial 2327 pruned. [I 2021-12-17 19:26:27,238] Trial 2328 pruned. [I 2021-12-17 19:26:29,127] Trial 2329 pruned. [I 2021-12-17 19:26:30,843] Trial 2330 pruned. [I 2021-12-17 19:26:35,437] Trial 2331 pruned. [I 2021-12-17 19:26:37,305] Trial 2332 pruned. [I 2021-12-17 19:26:39,311] Trial 2333 pruned. [I 2021-12-17 19:26:40,868] Trial 2334 pruned. [I 2021-12-17 19:26:42,628] Trial 2335 pruned. [I 2021-12-17 19:26:43,075] Trial 2336 pruned. [I 2021-12-17 19:26:50,175] Trial 2337 pruned. [I 2021-12-17 19:26:53,201] Trial 2338 pruned. [I 2021-12-17 19:26:53,786] Trial 2339 pruned. [I 2021-12-17 19:27:00,976] Trial 2340 pruned. [I 2021-12-17 19:27:02,731] Trial 2341 pruned. [I 2021-12-17 19:27:09,495] Trial 2342 pruned. [I 2021-12-17 19:27:11,160] Trial 2343 pruned. [I 2021-12-17 19:27:14,114] Trial 2344 pruned. [I 2021-12-17 19:27:16,005] Trial 2345 pruned. [I 2021-12-17 19:27:17,644] Trial 2346 pruned. [I 2021-12-17 19:27:20,797] Trial 2347 pruned. [I 2021-12-17 19:27:21,938] Trial 2348 pruned. [I 2021-12-17 19:27:23,268] Trial 2349 pruned. [I 2021-12-17 19:27:26,730] Trial 2350 pruned. [I 2021-12-17 19:27:28,147] Trial 2351 pruned. [I 2021-12-17 19:27:30,782] Trial 2352 pruned. [I 2021-12-17 19:27:31,741] Trial 2353 pruned. [I 2021-12-17 19:27:33,066] Trial 2354 pruned. [I 2021-12-17 19:27:34,838] Trial 2355 pruned. [I 2021-12-17 19:27:38,362] Trial 2356 pruned. [I 2021-12-17 19:27:43,427] Trial 2357 pruned. [I 2021-12-17 19:27:46,714] Trial 2358 pruned. [I 2021-12-17 19:27:52,582] Trial 2359 pruned. [I 2021-12-17 19:27:54,131] Trial 2360 pruned. [I 2021-12-17 19:27:55,577] Trial 2361 pruned. [I 2021-12-17 19:27:56,303] Trial 2362 pruned. [I 2021-12-17 19:27:56,808] Trial 2363 pruned. [I 2021-12-17 19:27:58,642] Trial 2364 pruned. [I 2021-12-17 19:28:02,606] Trial 2365 pruned. [I 2021-12-17 19:28:06,149] Trial 2366 pruned. [I 2021-12-17 19:28:08,175] Trial 2367 pruned. 
[I 2021-12-17 19:28:14,511] Trial 2368 pruned. [I 2021-12-17 19:28:16,077] Trial 2369 pruned. [I 2021-12-17 19:28:20,315] Trial 2370 pruned. [I 2021-12-17 19:28:22,953] Trial 2371 pruned. [I 2021-12-17 19:28:29,258] Trial 2372 pruned. [I 2021-12-17 19:28:30,697] Trial 2373 pruned. [I 2021-12-17 19:28:31,793] Trial 2374 pruned. [I 2021-12-17 19:28:34,657] Trial 2375 pruned. [I 2021-12-17 19:28:35,695] Trial 2376 pruned. [I 2021-12-17 19:28:37,960] Trial 2377 pruned. [I 2021-12-17 19:28:40,472] Trial 2378 pruned. [I 2021-12-17 19:28:52,460] Trial 2379 pruned. [I 2021-12-17 19:28:54,241] Trial 2380 pruned. [I 2021-12-17 19:28:57,040] Trial 2381 pruned. [I 2021-12-17 19:28:58,700] Trial 2382 pruned. [I 2021-12-17 19:28:59,422] Trial 2383 pruned. [I 2021-12-17 19:29:05,257] Trial 2384 pruned. [I 2021-12-17 19:29:07,055] Trial 2385 pruned. [I 2021-12-17 19:29:08,143] Trial 2386 pruned. [I 2021-12-17 19:29:08,829] Trial 2387 pruned. [I 2021-12-17 19:29:15,020] Trial 2388 pruned. [I 2021-12-17 19:29:16,644] Trial 2389 pruned. [I 2021-12-17 19:29:20,307] Trial 2390 pruned. [I 2021-12-17 19:29:21,963] Trial 2391 pruned. [I 2021-12-17 19:29:23,317] Trial 2392 pruned. [I 2021-12-17 19:29:25,318] Trial 2393 pruned. [I 2021-12-17 19:29:26,904] Trial 2394 pruned. [I 2021-12-17 19:29:34,354] Trial 2395 pruned. [I 2021-12-17 19:29:36,014] Trial 2396 pruned. [I 2021-12-17 19:29:37,632] Trial 2397 pruned. [I 2021-12-17 19:29:39,195] Trial 2398 pruned. [I 2021-12-17 19:29:40,993] Trial 2399 pruned. [I 2021-12-17 19:29:42,085] Trial 2400 pruned. [I 2021-12-17 19:29:44,390] Trial 2401 pruned. [I 2021-12-17 19:29:48,532] Trial 2402 pruned. [I 2021-12-17 19:29:50,036] Trial 2403 pruned. [I 2021-12-17 19:29:51,205] Trial 2404 pruned. [I 2021-12-17 19:29:54,613] Trial 2405 pruned. [I 2021-12-17 19:29:57,654] Trial 2406 pruned. [I 2021-12-17 19:29:58,425] Trial 2407 pruned. [I 2021-12-17 19:30:00,037] Trial 2408 pruned. [I 2021-12-17 19:30:06,209] Trial 2409 pruned. 
[I 2021-12-17 19:30:07,087] Trial 2410 pruned. [I 2021-12-17 19:30:08,900] Trial 2411 pruned. [I 2021-12-17 19:30:11,966] Trial 2412 pruned. [I 2021-12-17 19:30:12,479] Trial 2413 pruned. [I 2021-12-17 19:30:13,792] Trial 2414 pruned. [I 2021-12-17 19:30:16,598] Trial 2415 pruned. [I 2021-12-17 19:30:18,966] Trial 2416 pruned. [I 2021-12-17 19:30:20,588] Trial 2417 pruned. [I 2021-12-17 19:30:22,458] Trial 2418 pruned. [I 2021-12-17 19:30:24,277] Trial 2419 pruned. [I 2021-12-17 19:30:28,006] Trial 2420 pruned. [I 2021-12-17 19:30:30,909] Trial 2421 pruned. [I 2021-12-17 19:30:38,463] Trial 2422 pruned. [I 2021-12-17 19:30:39,604] Trial 2423 pruned. [I 2021-12-17 19:30:40,635] Trial 2424 pruned. [I 2021-12-17 19:30:44,090] Trial 2425 pruned. [I 2021-12-17 19:30:46,034] Trial 2426 pruned. [I 2021-12-17 19:30:49,150] Trial 2427 pruned. [I 2021-12-17 19:30:50,933] Trial 2428 pruned. [I 2021-12-17 19:30:53,929] Trial 2429 pruned. [I 2021-12-17 19:30:55,710] Trial 2430 pruned. [I 2021-12-17 19:30:56,427] Trial 2431 pruned. [I 2021-12-17 19:30:58,156] Trial 2432 pruned. [I 2021-12-17 19:31:00,113] Trial 2433 pruned. [I 2021-12-17 19:31:07,729] Trial 2434 pruned. [I 2021-12-17 19:31:08,220] Trial 2435 pruned. [I 2021-12-17 19:31:09,705] Trial 2436 pruned. [I 2021-12-17 19:31:12,986] Trial 2437 pruned. [I 2021-12-17 19:31:14,439] Trial 2438 pruned. [I 2021-12-17 19:31:16,407] Trial 2439 pruned. [I 2021-12-17 19:31:20,725] Trial 2440 pruned. [I 2021-12-17 19:31:24,292] Trial 2441 pruned. [I 2021-12-17 19:35:56,752] Trial 2442 finished with value: 158.52357482910156 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 760, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 2442 with value: 158.52357482910156. [I 2021-12-17 19:36:02,463] Trial 2443 pruned. 
[I 2021-12-17 19:36:08,339] Trial 2444 pruned. [I 2021-12-17 19:36:11,418] Trial 2445 pruned. [I 2021-12-17 19:36:17,233] Trial 2446 pruned. [I 2021-12-17 19:40:32,771] Trial 2447 finished with value: 178.03958129882812 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 714, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 2442 with value: 158.52357482910156. [I 2021-12-17 19:40:38,366] Trial 2448 pruned. [I 2021-12-17 19:40:40,089] Trial 2449 pruned. [I 2021-12-17 19:40:41,024] Trial 2450 pruned. [I 2021-12-17 19:40:44,021] Trial 2451 pruned. [I 2021-12-17 19:40:46,945] Trial 2452 pruned. [I 2021-12-17 19:40:48,431] Trial 2453 pruned. [I 2021-12-17 19:40:51,053] Trial 2454 pruned. [I 2021-12-17 19:40:54,512] Trial 2455 pruned. [I 2021-12-17 19:40:55,469] Trial 2456 pruned. [I 2021-12-17 19:41:00,597] Trial 2457 pruned. [I 2021-12-17 19:41:07,805] Trial 2458 pruned. [I 2021-12-17 19:41:14,015] Trial 2459 pruned. [I 2021-12-17 19:41:19,198] Trial 2460 pruned. [I 2021-12-17 19:41:20,135] Trial 2461 pruned. [I 2021-12-17 19:41:20,520] Trial 2462 pruned. [I 2021-12-17 19:41:25,795] Trial 2463 pruned. [I 2021-12-17 19:41:28,874] Trial 2464 pruned. [I 2021-12-17 19:45:19,015] Trial 2465 finished with value: 169.57565307617188 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 636, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 2442 with value: 158.52357482910156. [I 2021-12-17 19:45:21,725] Trial 2466 pruned. [I 2021-12-17 19:45:28,153] Trial 2467 pruned. [I 2021-12-17 19:45:29,062] Trial 2468 pruned. [I 2021-12-17 19:45:37,827] Trial 2469 pruned. 
[I 2021-12-17 19:45:40,423] Trial 2470 pruned. [I 2021-12-17 19:45:41,484] Trial 2471 pruned. [I 2021-12-17 19:45:44,088] Trial 2472 pruned. [I 2021-12-17 19:45:45,098] Trial 2473 pruned. [I 2021-12-17 19:45:46,967] Trial 2474 pruned. [I 2021-12-17 19:45:52,601] Trial 2475 pruned. [I 2021-12-17 19:46:04,455] Trial 2476 pruned. [I 2021-12-17 19:46:05,476] Trial 2477 pruned. [I 2021-12-17 19:46:31,503] Trial 2478 pruned. [I 2021-12-17 19:46:33,878] Trial 2479 pruned. [I 2021-12-17 19:46:34,437] Trial 2480 pruned. [I 2021-12-17 19:46:51,274] Trial 2481 pruned. [I 2021-12-17 19:46:58,746] Trial 2482 pruned. [I 2021-12-17 19:47:04,471] Trial 2483 pruned. [I 2021-12-17 19:47:05,199] Trial 2484 pruned. [I 2021-12-17 19:47:12,052] Trial 2485 pruned. [I 2021-12-17 19:47:13,907] Trial 2486 pruned. [I 2021-12-17 19:47:17,502] Trial 2487 pruned. [I 2021-12-17 19:47:22,835] Trial 2488 pruned. [I 2021-12-17 19:47:25,566] Trial 2489 pruned. [I 2021-12-17 19:47:31,384] Trial 2490 pruned. [I 2021-12-17 19:47:37,663] Trial 2491 pruned. [I 2021-12-17 19:47:41,185] Trial 2492 pruned. [I 2021-12-17 19:47:47,254] Trial 2493 pruned. [I 2021-12-17 19:47:58,638] Trial 2494 pruned. [I 2021-12-17 19:48:01,293] Trial 2495 pruned. [I 2021-12-17 19:48:05,167] Trial 2496 pruned. [I 2021-12-17 19:48:13,780] Trial 2497 pruned. [I 2021-12-17 19:48:15,372] Trial 2498 pruned. [I 2021-12-17 19:48:16,578] Trial 2499 pruned. [I 2021-12-17 19:48:18,693] Trial 2500 pruned. [I 2021-12-17 19:48:25,341] Trial 2501 pruned. [I 2021-12-17 19:48:32,513] Trial 2502 pruned. [I 2021-12-17 19:48:33,456] Trial 2503 pruned. [I 2021-12-17 19:48:34,528] Trial 2504 pruned. [I 2021-12-17 19:48:37,100] Trial 2505 pruned. [I 2021-12-17 19:48:38,095] Trial 2506 pruned. [I 2021-12-17 19:48:41,104] Trial 2507 pruned. [I 2021-12-17 19:48:41,967] Trial 2508 pruned. [I 2021-12-17 19:48:43,662] Trial 2509 pruned. [I 2021-12-17 19:48:47,114] Trial 2510 pruned. [I 2021-12-17 19:48:48,198] Trial 2511 pruned. 
[I 2021-12-17 19:48:50,159] Trial 2512 pruned. [I 2021-12-17 19:48:53,450] Trial 2513 pruned. [I 2021-12-17 19:48:55,254] Trial 2514 pruned. [I 2021-12-17 19:48:58,499] Trial 2515 pruned. [I 2021-12-17 19:49:03,503] Trial 2516 pruned. [I 2021-12-17 19:49:18,208] Trial 2517 pruned. [I 2021-12-17 19:49:21,218] Trial 2518 pruned. [I 2021-12-17 19:49:22,991] Trial 2519 pruned. [I 2021-12-17 19:49:25,323] Trial 2520 pruned. [I 2021-12-17 19:49:26,323] Trial 2521 pruned. [I 2021-12-17 19:49:34,345] Trial 2522 pruned. [I 2021-12-17 19:49:36,271] Trial 2523 pruned. [I 2021-12-17 19:49:37,897] Trial 2524 pruned. [I 2021-12-17 19:49:41,322] Trial 2525 pruned. [I 2021-12-17 19:49:42,254] Trial 2526 pruned. [I 2021-12-17 19:49:44,962] Trial 2527 pruned. [I 2021-12-17 19:49:46,642] Trial 2528 pruned. [I 2021-12-17 19:49:47,784] Trial 2529 pruned. [I 2021-12-17 19:49:53,383] Trial 2530 pruned. [I 2021-12-17 19:49:57,211] Trial 2531 pruned. [I 2021-12-17 19:49:58,919] Trial 2532 pruned. [I 2021-12-17 19:49:59,418] Trial 2533 pruned. [I 2021-12-17 19:50:05,717] Trial 2534 pruned. [I 2021-12-17 19:50:09,376] Trial 2535 pruned. [I 2021-12-17 19:50:10,476] Trial 2536 pruned. [I 2021-12-17 19:50:12,294] Trial 2537 pruned. [I 2021-12-17 19:50:13,918] Trial 2538 pruned. [I 2021-12-17 19:50:17,128] Trial 2539 pruned. [I 2021-12-17 19:50:20,601] Trial 2540 pruned. [I 2021-12-17 19:50:24,181] Trial 2541 pruned. [I 2021-12-17 19:50:27,124] Trial 2542 pruned. [I 2021-12-17 19:50:28,620] Trial 2543 pruned. [I 2021-12-17 19:50:30,402] Trial 2544 pruned. [I 2021-12-17 19:50:32,392] Trial 2545 pruned. [I 2021-12-17 19:50:37,142] Trial 2546 pruned. [I 2021-12-17 19:50:40,642] Trial 2547 pruned. [I 2021-12-17 19:50:43,879] Trial 2548 pruned. [I 2021-12-17 19:50:45,561] Trial 2549 pruned. [I 2021-12-17 19:50:50,213] Trial 2550 pruned. [I 2021-12-17 19:50:53,075] Trial 2551 pruned. [I 2021-12-17 19:50:53,772] Trial 2552 pruned. [I 2021-12-17 19:50:55,556] Trial 2553 pruned. 
[I 2021-12-17 19:51:01,109] Trial 2554 pruned. [I 2021-12-17 19:51:10,563] Trial 2555 pruned. [I 2021-12-17 19:51:14,142] Trial 2556 pruned. [I 2021-12-17 19:51:14,765] Trial 2557 pruned. [I 2021-12-17 19:51:17,725] Trial 2558 pruned. [I 2021-12-17 19:51:19,571] Trial 2559 pruned. [I 2021-12-17 19:51:21,517] Trial 2560 pruned. [I 2021-12-17 19:51:22,477] Trial 2561 pruned. [I 2021-12-17 19:51:24,784] Trial 2562 pruned. [I 2021-12-17 19:51:31,970] Trial 2563 pruned. [I 2021-12-17 19:51:34,514] Trial 2564 pruned. [I 2021-12-17 19:54:18,317] Trial 2565 finished with value: 162.0247039794922 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 398, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 2442 with value: 158.52357482910156. [I 2021-12-17 19:54:21,671] Trial 2566 pruned. [I 2021-12-17 19:54:23,377] Trial 2567 pruned. [I 2021-12-17 19:54:27,084] Trial 2568 pruned. [I 2021-12-17 19:54:33,308] Trial 2569 pruned. [I 2021-12-17 19:54:35,166] Trial 2570 pruned. [I 2021-12-17 19:54:38,243] Trial 2571 pruned. [I 2021-12-17 19:54:41,525] Trial 2572 pruned. [I 2021-12-17 19:54:43,294] Trial 2573 pruned. [I 2021-12-17 19:54:48,492] Trial 2574 pruned. [I 2021-12-17 19:54:50,052] Trial 2575 pruned. [I 2021-12-17 19:54:51,648] Trial 2576 pruned. [I 2021-12-17 19:54:52,841] Trial 2577 pruned. [I 2021-12-17 19:54:56,205] Trial 2578 pruned. [I 2021-12-17 19:57:23,666] Trial 2579 finished with value: 177.28761291503906 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 334, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 2442 with value: 158.52357482910156. 
[I 2021-12-17 19:57:26,997] Trial 2580 pruned. [I 2021-12-17 19:57:28,860] Trial 2581 pruned. [I 2021-12-17 19:57:29,740] Trial 2582 pruned. [I 2021-12-17 19:57:32,089] Trial 2583 pruned. [I 2021-12-17 19:57:33,773] Trial 2584 pruned. [I 2021-12-17 19:57:35,412] Trial 2585 pruned. [I 2021-12-17 19:57:40,923] Trial 2586 pruned. [I 2021-12-17 19:57:42,674] Trial 2587 pruned. [I 2021-12-17 19:57:44,813] Trial 2588 pruned. [I 2021-12-17 19:57:46,522] Trial 2589 pruned. [I 2021-12-17 19:57:52,255] Trial 2590 pruned. [I 2021-12-17 19:57:55,805] Trial 2591 pruned. [I 2021-12-17 19:57:59,030] Trial 2592 pruned. [I 2021-12-17 19:58:00,890] Trial 2593 pruned. [I 2021-12-17 19:58:02,108] Trial 2594 pruned. [I 2021-12-17 19:58:05,252] Trial 2595 pruned. [I 2021-12-17 19:58:09,064] Trial 2596 pruned. [I 2021-12-17 19:58:10,787] Trial 2597 pruned. [I 2021-12-17 19:58:17,825] Trial 2598 pruned. [I 2021-12-17 19:58:19,460] Trial 2599 pruned. [I 2021-12-17 19:58:23,391] Trial 2600 pruned. [I 2021-12-17 19:58:24,397] Trial 2601 pruned. [I 2021-12-17 19:58:26,191] Trial 2602 pruned. [I 2021-12-17 19:58:28,020] Trial 2603 pruned. [I 2021-12-17 19:58:29,718] Trial 2604 pruned. [I 2021-12-17 19:58:30,402] Trial 2605 pruned. [I 2021-12-17 19:58:32,178] Trial 2606 pruned. [I 2021-12-17 19:58:34,022] Trial 2607 pruned. [I 2021-12-17 19:58:35,929] Trial 2608 pruned. [I 2021-12-17 19:58:37,996] Trial 2609 pruned. [I 2021-12-17 19:58:41,403] Trial 2610 pruned. [I 2021-12-17 19:58:43,895] Trial 2611 pruned. [I 2021-12-17 19:58:46,537] Trial 2612 pruned. [I 2021-12-17 19:58:48,122] Trial 2613 pruned. [I 2021-12-17 19:58:53,365] Trial 2614 pruned. [I 2021-12-17 19:58:55,080] Trial 2615 pruned. [I 2021-12-17 19:58:56,170] Trial 2616 pruned. [I 2021-12-17 19:58:58,133] Trial 2617 pruned. [I 2021-12-17 19:59:01,285] Trial 2618 pruned. [I 2021-12-17 19:59:03,153] Trial 2619 pruned. [I 2021-12-17 19:59:05,117] Trial 2620 pruned. [I 2021-12-17 19:59:13,259] Trial 2621 pruned. 
[I 2021-12-17 19:59:19,549] Trial 2622 pruned. [I 2021-12-17 19:59:23,344] Trial 2623 pruned. [I 2021-12-17 19:59:25,235] Trial 2624 pruned. [I 2021-12-17 19:59:27,271] Trial 2625 pruned. [I 2021-12-17 19:59:28,940] Trial 2626 pruned. [I 2021-12-17 19:59:30,068] Trial 2627 pruned. [I 2021-12-17 19:59:33,644] Trial 2628 pruned. [I 2021-12-17 19:59:34,127] Trial 2629 pruned. [I 2021-12-17 19:59:36,122] Trial 2630 pruned. [I 2021-12-17 19:59:37,917] Trial 2631 pruned. [I 2021-12-17 19:59:44,085] Trial 2632 pruned. [I 2021-12-17 19:59:47,806] Trial 2633 pruned. [I 2021-12-17 19:59:49,665] Trial 2634 pruned. [I 2021-12-17 19:59:51,344] Trial 2635 pruned. [I 2021-12-17 19:59:56,084] Trial 2636 pruned. [I 2021-12-17 19:59:59,017] Trial 2637 pruned. [I 2021-12-17 20:00:00,950] Trial 2638 pruned. [I 2021-12-17 20:00:08,201] Trial 2639 pruned. [I 2021-12-17 20:00:09,926] Trial 2640 pruned. [I 2021-12-17 20:00:11,875] Trial 2641 pruned. [I 2021-12-17 20:00:13,551] Trial 2642 pruned. [I 2021-12-17 20:00:22,763] Trial 2643 pruned. [I 2021-12-17 20:00:24,762] Trial 2644 pruned. [I 2021-12-17 20:00:28,251] Trial 2645 pruned. [I 2021-12-17 20:00:31,548] Trial 2646 pruned. [I 2021-12-17 20:00:35,171] Trial 2647 pruned. [I 2021-12-17 20:00:37,050] Trial 2648 pruned. [I 2021-12-17 20:00:37,746] Trial 2649 pruned. [I 2021-12-17 20:00:40,659] Trial 2650 pruned. [I 2021-12-17 20:00:46,513] Trial 2651 pruned. [I 2021-12-17 20:00:48,684] Trial 2652 pruned. [I 2021-12-17 20:00:49,152] Trial 2653 pruned. [I 2021-12-17 20:00:54,769] Trial 2654 pruned. [I 2021-12-17 20:00:57,263] Trial 2655 pruned. [I 2021-12-17 20:01:01,683] Trial 2656 pruned. [I 2021-12-17 20:01:05,357] Trial 2657 pruned. [I 2021-12-17 20:01:12,574] Trial 2658 pruned. [I 2021-12-17 20:01:14,233] Trial 2659 pruned. [I 2021-12-17 20:01:17,703] Trial 2660 pruned. [I 2021-12-17 20:01:19,371] Trial 2661 pruned. [I 2021-12-17 20:01:26,956] Trial 2662 pruned. [I 2021-12-17 20:01:29,741] Trial 2663 pruned. 
[I 2021-12-17 20:01:32,990] Trial 2664 pruned. [I 2021-12-17 20:01:36,487] Trial 2665 pruned. [I 2021-12-17 20:01:37,438] Trial 2666 pruned. [I 2021-12-17 20:01:40,547] Trial 2667 pruned. [I 2021-12-17 20:01:44,510] Trial 2668 pruned. [I 2021-12-17 20:01:48,396] Trial 2669 pruned. [I 2021-12-17 20:01:50,329] Trial 2670 pruned. [I 2021-12-17 20:01:52,135] Trial 2671 pruned. [I 2021-12-17 20:01:53,755] Trial 2672 pruned. [I 2021-12-17 20:01:56,926] Trial 2673 pruned. [I 2021-12-17 20:01:57,663] Trial 2674 pruned. [I 2021-12-17 20:01:59,459] Trial 2675 pruned. [I 2021-12-17 20:02:02,102] Trial 2676 pruned. [I 2021-12-17 20:02:05,761] Trial 2677 pruned. [I 2021-12-17 20:02:09,032] Trial 2678 pruned. [I 2021-12-17 20:02:12,556] Trial 2679 pruned. [I 2021-12-17 20:02:14,379] Trial 2680 pruned. [I 2021-12-17 20:02:14,995] Trial 2681 pruned. [I 2021-12-17 20:02:18,023] Trial 2682 pruned. [I 2021-12-17 20:02:21,114] Trial 2683 pruned. [I 2021-12-17 20:02:22,969] Trial 2684 pruned. [I 2021-12-17 20:04:59,156] Trial 2685 finished with value: 161.69497680664062 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 368, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 2442 with value: 158.52357482910156. [I 2021-12-17 20:05:00,777] Trial 2686 pruned. [I 2021-12-17 20:05:02,518] Trial 2687 pruned. [I 2021-12-17 20:05:05,669] Trial 2688 pruned. [I 2021-12-17 20:05:10,186] Trial 2689 pruned. [I 2021-12-17 20:05:13,313] Trial 2690 pruned. [I 2021-12-17 20:05:15,025] Trial 2691 pruned. [I 2021-12-17 20:05:18,433] Trial 2692 pruned. [I 2021-12-17 20:05:20,035] Trial 2693 pruned. [I 2021-12-17 20:05:21,041] Trial 2694 pruned. [I 2021-12-17 20:05:22,694] Trial 2695 pruned. [I 2021-12-17 20:05:24,732] Trial 2696 pruned. [I 2021-12-17 20:05:25,710] Trial 2697 pruned. 
[I 2021-12-17 20:05:35,203] Trial 2698 pruned. [I 2021-12-17 20:05:35,858] Trial 2699 pruned. [I 2021-12-17 20:05:41,141] Trial 2700 pruned. [I 2021-12-17 20:05:42,930] Trial 2701 pruned. [I 2021-12-17 20:05:43,735] Trial 2702 pruned. [I 2021-12-17 20:05:45,467] Trial 2703 pruned. [I 2021-12-17 20:05:50,282] Trial 2704 pruned. [I 2021-12-17 20:05:52,098] Trial 2705 pruned. [I 2021-12-17 20:05:55,097] Trial 2706 pruned. [I 2021-12-17 20:06:01,521] Trial 2707 pruned. [I 2021-12-17 20:06:04,876] Trial 2708 pruned. [I 2021-12-17 20:06:06,669] Trial 2709 pruned. [I 2021-12-17 20:06:10,050] Trial 2710 pruned. [I 2021-12-17 20:06:13,048] Trial 2711 pruned. [I 2021-12-17 20:06:16,094] Trial 2712 pruned. [I 2021-12-17 20:06:17,718] Trial 2713 pruned. [I 2021-12-17 20:06:19,443] Trial 2714 pruned. [I 2021-12-17 20:06:25,314] Trial 2715 pruned. [I 2021-12-17 20:06:28,584] Trial 2716 pruned. [I 2021-12-17 20:06:30,331] Trial 2717 pruned. [I 2021-12-17 20:06:32,221] Trial 2718 pruned. [I 2021-12-17 20:06:35,800] Trial 2719 pruned. [I 2021-12-17 20:06:37,529] Trial 2720 pruned. [I 2021-12-17 20:06:45,315] Trial 2721 pruned. [I 2021-12-17 20:06:46,039] Trial 2722 pruned. [I 2021-12-17 20:06:48,238] Trial 2723 pruned. [I 2021-12-17 20:06:51,514] Trial 2724 pruned. [I 2021-12-17 20:06:54,638] Trial 2725 pruned. [I 2021-12-17 20:06:55,170] Trial 2726 pruned. [I 2021-12-17 20:06:58,555] Trial 2727 pruned. [I 2021-12-17 20:07:04,501] Trial 2728 pruned. [I 2021-12-17 20:07:07,677] Trial 2729 pruned. [I 2021-12-17 20:07:14,396] Trial 2730 pruned. [I 2021-12-17 20:07:16,166] Trial 2731 pruned. [I 2021-12-17 20:07:19,168] Trial 2732 pruned. [I 2021-12-17 20:07:25,349] Trial 2733 pruned. [I 2021-12-17 20:07:28,533] Trial 2734 pruned. [I 2021-12-17 20:07:31,317] Trial 2735 pruned. [I 2021-12-17 20:07:33,080] Trial 2736 pruned. [I 2021-12-17 20:07:36,379] Trial 2737 pruned. [I 2021-12-17 20:07:38,239] Trial 2738 pruned. [I 2021-12-17 20:07:39,325] Trial 2739 pruned. 
[I 2021-12-17 20:07:41,439] Trial 2740 pruned. [I 2021-12-17 20:07:43,166] Trial 2741 pruned. [I 2021-12-17 20:07:45,328] Trial 2742 pruned. [I 2021-12-17 20:07:51,298] Trial 2743 pruned. [I 2021-12-17 20:07:54,711] Trial 2744 pruned. [I 2021-12-17 20:07:57,088] Trial 2745 pruned. [I 2021-12-17 20:07:58,181] Trial 2746 pruned. [I 2021-12-17 20:08:00,823] Trial 2747 pruned. [I 2021-12-17 20:08:03,430] Trial 2748 pruned. [I 2021-12-17 20:08:05,534] Trial 2749 pruned. [I 2021-12-17 20:08:08,869] Trial 2750 pruned. [I 2021-12-17 20:08:09,450] Trial 2751 pruned. [I 2021-12-17 20:08:14,112] Trial 2752 pruned. [I 2021-12-17 20:08:15,980] Trial 2753 pruned. [I 2021-12-17 20:08:17,642] Trial 2754 pruned. [I 2021-12-17 20:08:21,624] Trial 2755 pruned. [I 2021-12-17 20:08:23,372] Trial 2756 pruned. [I 2021-12-17 20:08:26,938] Trial 2757 pruned. [I 2021-12-17 20:08:30,647] Trial 2758 pruned. [I 2021-12-17 20:08:31,857] Trial 2759 pruned. [I 2021-12-17 20:08:33,656] Trial 2760 pruned. [I 2021-12-17 20:08:36,699] Trial 2761 pruned. [I 2021-12-17 20:08:41,881] Trial 2762 pruned. [I 2021-12-17 20:08:42,901] Trial 2763 pruned. [I 2021-12-17 20:08:47,640] Trial 2764 pruned. [I 2021-12-17 20:08:51,453] Trial 2765 pruned. [I 2021-12-17 20:08:54,258] Trial 2766 pruned. [I 2021-12-17 20:08:57,710] Trial 2767 pruned. [I 2021-12-17 20:08:59,528] Trial 2768 pruned. [I 2021-12-17 20:09:01,227] Trial 2769 pruned. [I 2021-12-17 20:09:02,012] Trial 2770 pruned. [I 2021-12-17 20:09:05,074] Trial 2771 pruned. [I 2021-12-17 20:09:07,025] Trial 2772 pruned. [I 2021-12-17 20:09:10,380] Trial 2773 pruned. [I 2021-12-17 20:09:15,575] Trial 2774 pruned. [I 2021-12-17 20:09:16,714] Trial 2775 pruned. [I 2021-12-17 20:09:17,695] Trial 2776 pruned. [I 2021-12-17 20:09:22,798] Trial 2777 pruned. [I 2021-12-17 20:09:26,689] Trial 2778 pruned. [I 2021-12-17 20:09:28,652] Trial 2779 pruned. [I 2021-12-17 20:09:30,306] Trial 2780 pruned. [I 2021-12-17 20:09:33,591] Trial 2781 pruned. 
[I 2021-12-17 20:09:37,036] Trial 2782 pruned. [I 2021-12-17 20:09:39,271] Trial 2783 pruned. [I 2021-12-17 20:09:40,433] Trial 2784 pruned. [I 2021-12-17 20:09:47,612] Trial 2785 pruned. [I 2021-12-17 20:09:49,208] Trial 2786 pruned. [I 2021-12-17 20:09:50,975] Trial 2787 pruned. [I 2021-12-17 20:09:52,043] Trial 2788 pruned. [I 2021-12-17 20:09:55,337] Trial 2789 pruned. [I 2021-12-17 20:09:57,466] Trial 2790 pruned. [I 2021-12-17 20:10:03,018] Trial 2791 pruned. [I 2021-12-17 20:10:08,263] Trial 2792 pruned. [I 2021-12-17 20:10:11,275] Trial 2793 pruned. [I 2021-12-17 20:10:13,026] Trial 2794 pruned. [I 2021-12-17 20:10:13,699] Trial 2795 pruned. [I 2021-12-17 20:10:15,591] Trial 2796 pruned. [I 2021-12-17 20:10:17,360] Trial 2797 pruned. [I 2021-12-17 20:10:26,118] Trial 2798 pruned. [I 2021-12-17 20:10:37,584] Trial 2799 pruned. [I 2021-12-17 20:10:38,203] Trial 2800 pruned. [I 2021-12-17 20:10:39,970] Trial 2801 pruned. [I 2021-12-17 20:10:46,690] Trial 2802 pruned. [I 2021-12-17 20:10:51,557] Trial 2803 pruned. [I 2021-12-17 20:10:53,138] Trial 2804 pruned. [I 2021-12-17 20:10:55,785] Trial 2805 pruned. [I 2021-12-17 20:10:57,644] Trial 2806 pruned. [I 2021-12-17 20:10:59,301] Trial 2807 pruned. [I 2021-12-17 20:11:02,528] Trial 2808 pruned. [I 2021-12-17 20:11:06,089] Trial 2809 pruned. [I 2021-12-17 20:11:07,769] Trial 2810 pruned. [I 2021-12-17 20:11:09,606] Trial 2811 pruned. [I 2021-12-17 20:11:11,538] Trial 2812 pruned. [I 2021-12-17 20:11:14,902] Trial 2813 pruned. [I 2021-12-17 20:11:17,313] Trial 2814 pruned. [I 2021-12-17 20:11:24,797] Trial 2815 pruned. [I 2021-12-17 20:11:27,501] Trial 2816 pruned. [I 2021-12-17 20:11:29,416] Trial 2817 pruned. [I 2021-12-17 20:11:30,081] Trial 2818 pruned. [I 2021-12-17 20:11:35,344] Trial 2819 pruned. [I 2021-12-17 20:11:37,018] Trial 2820 pruned. [I 2021-12-17 20:11:39,035] Trial 2821 pruned. [I 2021-12-17 20:11:40,879] Trial 2822 pruned. 
[I 2021-12-17 20:14:14,516] Trial 2823 finished with value: 182.70187377929688 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 358, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 2442 with value: 158.52357482910156. [I 2021-12-17 20:14:16,424] Trial 2824 pruned. [I 2021-12-17 20:14:17,356] Trial 2825 pruned. [I 2021-12-17 20:14:21,235] Trial 2826 pruned. [I 2021-12-17 20:14:25,382] Trial 2827 pruned. [I 2021-12-17 20:14:27,106] Trial 2828 pruned. [I 2021-12-17 20:14:30,153] Trial 2829 pruned. [I 2021-12-17 20:14:36,580] Trial 2830 pruned. [I 2021-12-17 20:14:42,173] Trial 2831 pruned. [I 2021-12-17 20:14:43,898] Trial 2832 pruned. [I 2021-12-17 20:14:47,438] Trial 2833 pruned. [I 2021-12-17 20:14:53,306] Trial 2834 pruned. [I 2021-12-17 20:14:54,948] Trial 2835 pruned. [I 2021-12-17 20:14:56,843] Trial 2836 pruned. [I 2021-12-17 20:14:58,648] Trial 2837 pruned. [I 2021-12-17 20:15:05,906] Trial 2838 pruned. [I 2021-12-17 20:15:09,640] Trial 2839 pruned. [I 2021-12-17 20:15:11,855] Trial 2840 pruned. [I 2021-12-17 20:15:13,487] Trial 2841 pruned. [I 2021-12-17 20:15:14,238] Trial 2842 pruned. [I 2021-12-17 20:15:17,579] Trial 2843 pruned. [I 2021-12-17 20:15:19,454] Trial 2844 pruned. [I 2021-12-17 20:15:22,942] Trial 2845 pruned. [I 2021-12-17 20:15:25,460] Trial 2846 pruned. [I 2021-12-17 20:15:25,981] Trial 2847 pruned. [I 2021-12-17 20:15:29,783] Trial 2848 pruned. [I 2021-12-17 20:15:31,692] Trial 2849 pruned. [I 2021-12-17 20:15:33,515] Trial 2850 pruned. [I 2021-12-17 20:15:34,739] Trial 2851 pruned. [I 2021-12-17 20:15:36,749] Trial 2852 pruned. [I 2021-12-17 20:15:40,166] Trial 2853 pruned. [I 2021-12-17 20:15:41,662] Trial 2854 pruned. [I 2021-12-17 20:15:45,534] Trial 2855 pruned. [I 2021-12-17 20:15:55,764] Trial 2856 pruned. 
[I 2021-12-17 20:15:57,508] Trial 2857 pruned. [I 2021-12-17 20:16:00,749] Trial 2858 pruned. [I 2021-12-17 20:16:04,696] Trial 2859 pruned. [I 2021-12-17 20:16:06,292] Trial 2860 pruned. [I 2021-12-17 20:16:07,409] Trial 2861 pruned. [I 2021-12-17 20:16:10,902] Trial 2862 pruned. [I 2021-12-17 20:16:16,250] Trial 2863 pruned. [I 2021-12-17 20:16:18,509] Trial 2864 pruned. [I 2021-12-17 20:16:20,494] Trial 2865 pruned. [I 2021-12-17 20:16:26,584] Trial 2866 pruned. [I 2021-12-17 20:16:30,665] Trial 2867 pruned. [I 2021-12-17 20:16:31,411] Trial 2868 pruned. [I 2021-12-17 20:16:33,167] Trial 2869 pruned. [I 2021-12-17 20:16:35,795] Trial 2870 pruned. [I 2021-12-17 20:16:37,286] Trial 2871 pruned. [I 2021-12-17 20:16:40,885] Trial 2872 pruned. [I 2021-12-17 20:16:42,953] Trial 2873 pruned. [I 2021-12-17 20:16:46,172] Trial 2874 pruned. [I 2021-12-17 20:16:49,269] Trial 2875 pruned. [I 2021-12-17 20:16:52,855] Trial 2876 pruned. [I 2021-12-17 20:16:54,677] Trial 2877 pruned. [I 2021-12-17 20:21:26,531] Trial 2878 finished with value: 170.5189971923828 and parameters: {'batch_size': 16, 'n_hdn_layers': 3, 'neurons_HL1': 774, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu'}. Best is trial 2442 with value: 158.52357482910156. [I 2021-12-17 20:21:29,800] Trial 2879 pruned. [I 2021-12-17 20:21:33,270] Trial 2880 pruned. [I 2021-12-17 20:21:35,225] Trial 2881 pruned. [I 2021-12-17 20:21:36,982] Trial 2882 pruned. [I 2021-12-17 20:21:38,208] Trial 2883 pruned. [I 2021-12-17 20:21:40,667] Trial 2884 pruned. [I 2021-12-17 20:21:44,255] Trial 2885 pruned. [I 2021-12-17 20:21:47,436] Trial 2886 pruned. [I 2021-12-17 20:21:49,282] Trial 2887 pruned. [I 2021-12-17 20:21:51,046] Trial 2888 pruned. [I 2021-12-17 20:21:54,243] Trial 2889 pruned. [I 2021-12-17 20:22:07,061] Trial 2890 pruned. [I 2021-12-17 20:22:07,856] Trial 2891 pruned. 
[I 2021-12-17 20:22:11,301] Trial 2892 pruned. [I 2021-12-17 20:22:13,359] Trial 2893 pruned. [I 2021-12-17 20:22:14,775] Trial 2894 pruned. [I 2021-12-17 20:22:17,759] Trial 2895 pruned. [I 2021-12-17 20:22:19,591] Trial 2896 pruned. [I 2021-12-17 20:22:20,152] Trial 2897 pruned. [I 2021-12-17 20:22:24,063] Trial 2898 pruned. [I 2021-12-17 20:22:27,215] Trial 2899 pruned. [I 2021-12-17 20:22:28,892] Trial 2900 pruned. [I 2021-12-17 20:22:30,993] Trial 2901 pruned. [I 2021-12-17 20:22:34,150] Trial 2902 pruned. [I 2021-12-17 20:22:37,968] Trial 2903 pruned. [I 2021-12-17 20:22:40,222] Trial 2904 pruned. [I 2021-12-17 20:22:42,972] Trial 2905 pruned. [I 2021-12-17 20:22:48,099] Trial 2906 pruned. [I 2021-12-17 20:22:55,066] Trial 2907 pruned. [I 2021-12-17 20:22:56,755] Trial 2908 pruned. [I 2021-12-17 20:23:03,030] Trial 2909 pruned. [I 2021-12-17 20:23:04,835] Trial 2910 pruned. [I 2021-12-17 20:23:07,548] Trial 2911 pruned. [I 2021-12-17 20:23:08,623] Trial 2912 pruned. [I 2021-12-17 20:23:10,941] Trial 2913 pruned. [I 2021-12-17 20:23:16,012] Trial 2914 pruned. [I 2021-12-17 20:23:16,767] Trial 2915 pruned. [I 2021-12-17 20:23:17,849] Trial 2916 pruned. [I 2021-12-17 20:23:36,193] Trial 2917 pruned. [I 2021-12-17 20:23:39,822] Trial 2918 pruned. [I 2021-12-17 20:23:43,069] Trial 2919 pruned. [I 2021-12-17 20:23:44,852] Trial 2920 pruned. [I 2021-12-17 20:23:45,482] Trial 2921 pruned. [I 2021-12-17 20:23:51,991] Trial 2922 pruned. [I 2021-12-17 20:23:55,273] Trial 2923 pruned. [I 2021-12-17 20:23:57,056] Trial 2924 pruned. [I 2021-12-17 20:23:58,329] Trial 2925 pruned. [I 2021-12-17 20:24:01,292] Trial 2926 pruned. [I 2021-12-17 20:24:04,563] Trial 2927 pruned. [I 2021-12-17 20:24:06,630] Trial 2928 pruned. [I 2021-12-17 20:24:08,616] Trial 2929 pruned. [I 2021-12-17 20:24:11,969] Trial 2930 pruned. [I 2021-12-17 20:24:26,524] Trial 2931 pruned. [I 2021-12-17 20:24:27,714] Trial 2932 pruned. [I 2021-12-17 20:24:29,583] Trial 2933 pruned. 
[I 2021-12-17 20:24:33,778] Trial 2934 pruned. [I 2021-12-17 20:24:36,779] Trial 2935 pruned. [I 2021-12-17 20:24:46,313] Trial 2936 pruned. [I 2021-12-17 20:24:48,377] Trial 2937 pruned. [I 2021-12-17 20:24:50,202] Trial 2938 pruned. [I 2021-12-17 20:24:50,898] Trial 2939 pruned. [I 2021-12-17 20:25:00,173] Trial 2940 pruned. [I 2021-12-17 20:25:01,730] Trial 2941 pruned. [I 2021-12-17 20:25:05,089] Trial 2942 pruned. [I 2021-12-17 20:25:05,688] Trial 2943 pruned. [I 2021-12-17 20:25:09,426] Trial 2944 pruned. [I 2021-12-17 20:25:12,739] Trial 2945 pruned. [I 2021-12-17 20:25:13,943] Trial 2946 pruned. [I 2021-12-17 20:25:17,100] Trial 2947 pruned. [I 2021-12-17 20:25:19,068] Trial 2948 pruned. [I 2021-12-17 20:25:22,659] Trial 2949 pruned. [I 2021-12-17 20:25:26,022] Trial 2950 pruned. [I 2021-12-17 20:25:29,371] Trial 2951 pruned. [I 2021-12-17 20:25:30,998] Trial 2952 pruned. [I 2021-12-17 20:25:38,279] Trial 2953 pruned. [I 2021-12-17 20:25:39,944] Trial 2954 pruned. [I 2021-12-17 20:25:42,049] Trial 2955 pruned. [I 2021-12-17 20:25:45,577] Trial 2956 pruned. [I 2021-12-17 20:25:46,559] Trial 2957 pruned. [I 2021-12-17 20:25:52,028] Trial 2958 pruned. [I 2021-12-17 20:25:54,017] Trial 2959 pruned. [I 2021-12-17 20:25:55,765] Trial 2960 pruned. [I 2021-12-17 20:25:58,084] Trial 2961 pruned. [I 2021-12-17 20:26:01,494] Trial 2962 pruned. [I 2021-12-17 20:26:08,920] Trial 2963 pruned. [I 2021-12-17 20:26:09,936] Trial 2964 pruned. [I 2021-12-17 20:26:19,008] Trial 2965 pruned. [I 2021-12-17 20:26:20,639] Trial 2966 pruned. [I 2021-12-17 20:26:25,712] Trial 2967 pruned. [I 2021-12-17 20:26:26,285] Trial 2968 pruned. [I 2021-12-17 20:26:28,236] Trial 2969 pruned. [I 2021-12-17 20:26:32,200] Trial 2970 pruned. [I 2021-12-17 20:26:33,751] Trial 2971 pruned. [I 2021-12-17 20:26:36,713] Trial 2972 pruned. [I 2021-12-17 20:26:40,197] Trial 2973 pruned. [I 2021-12-17 20:26:42,208] Trial 2974 pruned. [I 2021-12-17 20:26:43,818] Trial 2975 pruned. 
[I 2021-12-17 20:26:47,241] Trial 2976 pruned. [I 2021-12-17 20:26:50,037] Trial 2977 pruned. [I 2021-12-17 20:26:52,013] Trial 2978 pruned. [I 2021-12-17 20:26:54,067] Trial 2979 pruned. [I 2021-12-17 20:26:55,151] Trial 2980 pruned. [I 2021-12-17 20:26:57,009] Trial 2981 pruned. [I 2021-12-17 20:26:58,640] Trial 2982 pruned. [I 2021-12-17 20:27:03,354] Trial 2983 pruned. [I 2021-12-17 20:27:06,403] Trial 2984 pruned. [I 2021-12-17 20:27:07,974] Trial 2985 pruned. [I 2021-12-17 20:27:11,174] Trial 2986 pruned. [I 2021-12-17 20:27:12,634] Trial 2987 pruned. [I 2021-12-17 20:27:15,871] Trial 2988 pruned. [I 2021-12-17 20:27:16,668] Trial 2989 pruned. [I 2021-12-17 20:27:20,996] Trial 2990 pruned. [I 2021-12-17 20:27:24,710] Trial 2991 pruned. [I 2021-12-17 20:27:25,365] Trial 2992 pruned. [I 2021-12-17 20:27:27,115] Trial 2993 pruned. [I 2021-12-17 20:27:29,262] Trial 2994 pruned. [I 2021-12-17 20:27:32,330] Trial 2995 pruned. [I 2021-12-17 20:27:33,960] Trial 2996 pruned. [I 2021-12-17 20:27:36,561] Trial 2997 pruned. [I 2021-12-17 20:27:38,506] Trial 2998 pruned. [I 2021-12-17 20:27:41,851] Trial 2999 pruned. [I 2021-12-17 20:27:43,503] Trial 3000 pruned. [I 2021-12-17 20:27:51,060] Trial 3001 pruned. [I 2021-12-17 20:27:52,951] Trial 3002 pruned. [I 2021-12-17 20:27:57,204] Trial 3003 pruned. [I 2021-12-17 20:27:59,407] Trial 3004 pruned. [I 2021-12-17 20:28:02,665] Trial 3005 pruned. [I 2021-12-17 20:28:04,499] Trial 3006 pruned. [I 2021-12-17 20:28:06,409] Trial 3007 pruned. [I 2021-12-17 20:28:07,556] Trial 3008 pruned. [I 2021-12-17 20:28:13,491] Trial 3009 pruned. [I 2021-12-17 20:28:16,193] Trial 3010 pruned. [I 2021-12-17 20:28:17,004] Trial 3011 pruned. [I 2021-12-17 20:28:18,878] Trial 3012 pruned. [I 2021-12-17 20:28:21,808] Trial 3013 pruned. [I 2021-12-17 20:28:25,160] Trial 3014 pruned. [I 2021-12-17 20:28:25,792] Trial 3015 pruned. [I 2021-12-17 20:28:27,438] Trial 3016 pruned. [I 2021-12-17 20:28:29,227] Trial 3017 pruned. 
[I 2021-12-17 20:28:32,706] Trial 3018 pruned. [I 2021-12-17 20:28:34,542] Trial 3019 pruned. [I 2021-12-17 20:28:38,365] Trial 3020 pruned. [I 2021-12-17 20:28:40,374] Trial 3021 pruned. [I 2021-12-17 20:28:46,775] Trial 3022 pruned. [I 2021-12-17 20:28:48,472] Trial 3023 pruned. [I 2021-12-17 20:28:50,682] Trial 3024 pruned. [I 2021-12-17 20:28:52,540] Trial 3025 pruned. [I 2021-12-17 20:28:54,471] Trial 3026 pruned. [I 2021-12-17 20:29:02,315] Trial 3027 pruned. [I 2021-12-17 20:29:05,208] Trial 3028 pruned. [I 2021-12-17 20:29:08,475] Trial 3029 pruned. [I 2021-12-17 20:29:13,005] Trial 3030 pruned. [I 2021-12-17 20:29:14,704] Trial 3031 pruned. [I 2021-12-17 20:29:18,187] Trial 3032 pruned. [I 2021-12-17 20:29:21,340] Trial 3033 pruned. [I 2021-12-17 20:29:22,908] Trial 3034 pruned. [I 2021-12-17 20:29:23,785] Trial 3035 pruned. [I 2021-12-17 20:29:28,982] Trial 3036 pruned. [I 2021-12-17 20:29:39,582] Trial 3037 pruned. [I 2021-12-17 20:29:41,438] Trial 3038 pruned. [I 2021-12-17 20:29:42,212] Trial 3039 pruned. [I 2021-12-17 20:29:45,995] Trial 3040 pruned. [I 2021-12-17 20:29:46,897] Trial 3041 pruned. [I 2021-12-17 20:29:48,704] Trial 3042 pruned. [I 2021-12-17 20:29:52,712] Trial 3043 pruned. [I 2021-12-17 20:29:56,158] Trial 3044 pruned. [I 2021-12-17 20:29:59,032] Trial 3045 pruned. [I 2021-12-17 20:30:01,372] Trial 3046 pruned. [I 2021-12-17 20:30:04,651] Trial 3047 pruned. [I 2021-12-17 20:30:06,318] Trial 3048 pruned. [I 2021-12-17 20:30:08,537] Trial 3049 pruned. [I 2021-12-17 20:30:15,242] Trial 3050 pruned. [I 2021-12-17 20:30:17,136] Trial 3051 pruned. [I 2021-12-17 20:30:18,874] Trial 3052 pruned. [I 2021-12-17 20:30:23,256] Trial 3053 pruned. [I 2021-12-17 20:30:25,193] Trial 3054 pruned. [I 2021-12-17 20:30:39,466] Trial 3055 pruned. [I 2021-12-17 20:30:41,608] Trial 3056 pruned. [I 2021-12-17 20:30:44,767] Trial 3057 pruned. [I 2021-12-17 20:30:50,151] Trial 3058 pruned. [I 2021-12-17 20:30:51,111] Trial 3059 pruned. 
[I 2021-12-17 20:30:55,576] Trial 3060 pruned. [I 2021-12-17 20:30:57,649] Trial 3061 pruned. [I 2021-12-17 20:31:00,341] Trial 3062 pruned. [I 2021-12-17 20:31:03,616] Trial 3063 pruned. [I 2021-12-17 20:31:05,456] Trial 3064 pruned. [I 2021-12-17 20:31:07,165] Trial 3065 pruned. [I 2021-12-17 20:31:07,805] Trial 3066 pruned. [I 2021-12-17 20:31:09,756] Trial 3067 pruned. [I 2021-12-17 20:31:11,460] Trial 3068 pruned. [I 2021-12-17 20:31:23,432] Trial 3069 pruned. [I 2021-12-17 20:31:25,394] Trial 3070 pruned. [I 2021-12-17 20:31:28,660] Trial 3071 pruned. [I 2021-12-17 20:31:32,040] Trial 3072 pruned. [I 2021-12-17 20:31:35,228] Trial 3073 pruned. [I 2021-12-17 20:31:39,500] Trial 3074 pruned. [I 2021-12-17 20:31:41,434] Trial 3075 pruned. [I 2021-12-17 20:31:42,529] Trial 3076 pruned. [I 2021-12-17 20:31:46,005] Trial 3077 pruned. [I 2021-12-17 20:31:47,847] Trial 3078 pruned. [I 2021-12-17 20:31:51,719] Trial 3079 pruned. [I 2021-12-17 20:31:53,543] Trial 3080 pruned. [I 2021-12-17 20:31:55,361] Trial 3081 pruned. [I 2021-12-17 20:32:00,899] Trial 3082 pruned. [I 2021-12-17 20:32:01,616] Trial 3083 pruned. [I 2021-12-17 20:32:03,284] Trial 3084 pruned. [I 2021-12-17 20:32:06,983] Trial 3085 pruned. [I 2021-12-17 20:32:10,045] Trial 3086 pruned. [I 2021-12-17 20:32:10,692] Trial 3087 pruned. [I 2021-12-17 20:32:14,375] Trial 3088 pruned. [I 2021-12-17 20:32:16,165] Trial 3089 pruned. [I 2021-12-17 20:32:18,798] Trial 3090 pruned. [I 2021-12-17 20:32:21,110] Trial 3091 pruned. [I 2021-12-17 20:32:22,909] Trial 3092 pruned. [I 2021-12-17 20:32:24,043] Trial 3093 pruned. [I 2021-12-17 20:32:30,305] Trial 3094 pruned. [I 2021-12-17 20:32:33,445] Trial 3095 pruned. [I 2021-12-17 20:32:37,213] Trial 3096 pruned. [I 2021-12-17 20:32:38,984] Trial 3097 pruned. [I 2021-12-17 20:32:40,414] Trial 3098 pruned. [I 2021-12-17 20:32:43,811] Trial 3099 pruned. [I 2021-12-17 20:32:57,800] Trial 3100 pruned. [I 2021-12-17 20:32:58,975] Trial 3101 pruned. 
[I 2021-12-17 20:33:02,435] Trial 3102 pruned. [I 2021-12-17 20:33:04,178] Trial 3103 pruned. [I 2021-12-17 20:33:06,030] Trial 3104 pruned. [I 2021-12-17 20:33:09,002] Trial 3105 pruned. [I 2021-12-17 20:33:10,905] Trial 3106 pruned. [I 2021-12-17 20:33:12,988] Trial 3107 pruned. [I 2021-12-17 20:33:13,694] Trial 3108 pruned. [I 2021-12-17 20:33:17,244] Trial 3109 pruned. [I 2021-12-17 20:33:18,811] Trial 3110 pruned. [I 2021-12-17 20:33:19,378] Trial 3111 pruned. [I 2021-12-17 20:33:21,112] Trial 3112 pruned. [I 2021-12-17 20:33:23,294] Trial 3113 pruned. [I 2021-12-17 20:33:27,704] Trial 3114 pruned. [I 2021-12-17 20:33:29,849] Trial 3115 pruned. [I 2021-12-17 20:33:35,128] Trial 3116 pruned. [I 2021-12-17 20:33:36,868] Trial 3117 pruned. [I 2021-12-17 20:33:38,964] Trial 3118 pruned. [I 2021-12-17 20:33:42,365] Trial 3119 pruned. [I 2021-12-17 20:34:06,115] Trial 3120 pruned. [I 2021-12-17 20:34:23,432] Trial 3121 pruned. [I 2021-12-17 20:34:25,141] Trial 3122 pruned. [I 2021-12-17 20:34:31,132] Trial 3123 pruned. [I 2021-12-17 20:34:33,023] Trial 3124 pruned. [I 2021-12-17 20:34:34,113] Trial 3125 pruned. [I 2021-12-17 20:34:36,132] Trial 3126 pruned. [I 2021-12-17 20:34:43,635] Trial 3127 pruned. [I 2021-12-17 20:34:45,293] Trial 3128 pruned. [I 2021-12-17 20:34:48,758] Trial 3129 pruned. [I 2021-12-17 20:34:50,745] Trial 3130 pruned. [I 2021-12-17 20:34:51,506] Trial 3131 pruned. [I 2021-12-17 20:34:53,491] Trial 3132 pruned. [I 2021-12-17 20:34:56,662] Trial 3133 pruned. [I 2021-12-17 20:34:58,075] Trial 3134 pruned. [I 2021-12-17 20:35:01,598] Trial 3135 pruned. [I 2021-12-17 20:35:02,237] Trial 3136 pruned. [I 2021-12-17 20:35:09,236] Trial 3137 pruned. [I 2021-12-17 20:35:10,838] Trial 3138 pruned. [I 2021-12-17 20:35:14,527] Trial 3139 pruned. [I 2021-12-17 20:35:16,345] Trial 3140 pruned. [I 2021-12-17 20:35:18,303] Trial 3141 pruned. [I 2021-12-17 20:35:21,152] Trial 3142 pruned. [I 2021-12-17 20:35:22,263] Trial 3143 pruned. 
[I 2021-12-17 20:35:24,215] Trial 3144 pruned. [I 2021-12-17 20:35:27,581] Trial 3145 pruned. [I 2021-12-17 20:35:30,566] Trial 3146 pruned. [I 2021-12-17 20:35:33,412] Trial 3147 pruned. [I 2021-12-17 20:35:34,497] Trial 3148 pruned. [I 2021-12-17 20:35:36,395] Trial 3149 pruned. [I 2021-12-17 20:35:39,139] Trial 3150 pruned. [I 2021-12-17 20:35:41,189] Trial 3151 pruned. [I 2021-12-17 20:35:44,467] Trial 3152 pruned. [I 2021-12-17 20:35:46,443] Trial 3153 pruned. [I 2021-12-17 20:35:50,738] Trial 3154 pruned. [I 2021-12-17 20:35:51,282] Trial 3155 pruned. [I 2021-12-17 20:35:54,842] Trial 3156 pruned. [I 2021-12-17 20:35:59,842] Trial 3157 pruned. [I 2021-12-17 20:36:02,975] Trial 3158 pruned. [I 2021-12-17 20:36:03,626] Trial 3159 pruned. [I 2021-12-17 20:36:11,632] Trial 3160 pruned. [I 2021-12-17 20:36:13,586] Trial 3161 pruned. [I 2021-12-17 20:36:15,215] Trial 3162 pruned. [I 2021-12-17 20:36:19,624] Trial 3163 pruned. [I 2021-12-17 20:36:25,888] Trial 3164 pruned. [I 2021-12-17 20:36:32,599] Trial 3165 pruned. [I 2021-12-17 20:36:34,354] Trial 3166 pruned. [I 2021-12-17 20:36:37,623] Trial 3167 pruned. [I 2021-12-17 20:36:39,656] Trial 3168 pruned. [I 2021-12-17 20:36:43,262] Trial 3169 pruned. [I 2021-12-17 20:37:00,904] Trial 3170 pruned. [I 2021-12-17 20:37:02,762] Trial 3171 pruned. [I 2021-12-17 20:37:04,518] Trial 3172 pruned. [I 2021-12-17 20:37:06,166] Trial 3173 pruned. [I 2021-12-17 20:37:09,819] Trial 3174 pruned. [I 2021-12-17 20:37:11,186] Trial 3175 pruned. [I 2021-12-17 20:37:12,358] Trial 3176 pruned. [I 2021-12-17 20:37:17,136] Trial 3177 pruned. [I 2021-12-17 20:37:19,725] Trial 3178 pruned. [I 2021-12-17 20:37:23,687] Trial 3179 pruned. [I 2021-12-17 20:37:27,266] Trial 3180 pruned. [I 2021-12-17 20:37:28,054] Trial 3181 pruned. [I 2021-12-17 20:37:31,056] Trial 3182 pruned. [I 2021-12-17 20:37:36,768] Trial 3183 pruned. [I 2021-12-17 20:37:37,392] Trial 3184 pruned. [I 2021-12-17 20:37:43,206] Trial 3185 pruned. 
[I 2021-12-17 20:40:12,581] Trial 3186 finished with value: 159.0045166015625 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 340, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 2442 with value: 158.52357482910156. [I 2021-12-17 20:40:14,579] Trial 3187 pruned. [I 2021-12-17 20:40:16,420] Trial 3188 pruned. [I 2021-12-17 20:40:19,725] Trial 3189 pruned. [I 2021-12-17 20:40:21,614] Trial 3190 pruned. [I 2021-12-17 20:40:25,034] Trial 3191 pruned. [I 2021-12-17 20:40:26,950] Trial 3192 pruned. [I 2021-12-17 20:40:30,475] Trial 3193 pruned. [I 2021-12-17 20:40:32,299] Trial 3194 pruned. [I 2021-12-17 20:40:34,188] Trial 3195 pruned. [I 2021-12-17 20:40:35,654] Trial 3196 pruned. [I 2021-12-17 20:40:37,326] Trial 3197 pruned. [I 2021-12-17 20:40:39,126] Trial 3198 pruned. [I 2021-12-17 20:40:42,450] Trial 3199 pruned. [I 2021-12-17 20:40:43,576] Trial 3200 pruned. [I 2021-12-17 20:40:47,179] Trial 3201 pruned. [I 2021-12-17 20:40:49,018] Trial 3202 pruned. [I 2021-12-17 20:40:50,983] Trial 3203 pruned. [I 2021-12-17 20:40:51,978] Trial 3204 pruned. [I 2021-12-17 20:40:53,003] Trial 3205 pruned. [I 2021-12-17 20:40:56,648] Trial 3206 pruned. [I 2021-12-17 20:40:58,434] Trial 3207 pruned. [I 2021-12-17 20:40:58,995] Trial 3208 pruned. [I 2021-12-17 20:41:02,615] Trial 3209 pruned. [I 2021-12-17 20:41:05,892] Trial 3210 pruned. [I 2021-12-17 20:41:09,587] Trial 3211 pruned. [I 2021-12-17 20:41:11,989] Trial 3212 pruned. [I 2021-12-17 20:41:13,843] Trial 3213 pruned. [I 2021-12-17 20:41:17,961] Trial 3214 pruned. [I 2021-12-17 20:41:20,087] Trial 3215 pruned. [I 2021-12-17 20:41:21,943] Trial 3216 pruned. [I 2021-12-17 20:41:23,685] Trial 3217 pruned. [I 2021-12-17 20:41:27,367] Trial 3218 pruned. [I 2021-12-17 20:41:29,169] Trial 3219 pruned. 
[I 2021-12-17 20:41:32,696] Trial 3220 pruned. [I 2021-12-17 20:41:33,761] Trial 3221 pruned. [I 2021-12-17 20:41:35,834] Trial 3222 pruned. [I 2021-12-17 20:41:40,104] Trial 3223 pruned. [I 2021-12-17 20:41:43,386] Trial 3224 pruned. [I 2021-12-17 20:41:45,443] Trial 3225 pruned. [I 2021-12-17 20:41:48,905] Trial 3226 pruned. [I 2021-12-17 20:42:01,705] Trial 3227 pruned. [I 2021-12-17 20:42:03,413] Trial 3228 pruned. [I 2021-12-17 20:42:08,505] Trial 3229 pruned. [I 2021-12-17 20:42:09,312] Trial 3230 pruned. [I 2021-12-17 20:42:11,137] Trial 3231 pruned. [I 2021-12-17 20:42:11,740] Trial 3232 pruned. [I 2021-12-17 20:42:14,453] Trial 3233 pruned. [I 2021-12-17 20:42:20,424] Trial 3234 pruned. [I 2021-12-17 20:42:22,321] Trial 3235 pruned. [I 2021-12-17 20:42:25,963] Trial 3236 pruned. [I 2021-12-17 20:42:27,563] Trial 3237 pruned. [I 2021-12-17 20:42:28,938] Trial 3238 pruned. [I 2021-12-17 20:42:34,873] Trial 3239 pruned. [I 2021-12-17 20:42:36,837] Trial 3240 pruned. [I 2021-12-17 20:42:40,448] Trial 3241 pruned. [I 2021-12-17 20:42:42,276] Trial 3242 pruned. [I 2021-12-17 20:42:43,937] Trial 3243 pruned. [I 2021-12-17 20:42:45,146] Trial 3244 pruned. [I 2021-12-17 20:42:47,331] Trial 3245 pruned. [I 2021-12-17 20:42:50,857] Trial 3246 pruned. [I 2021-12-17 20:42:52,628] Trial 3247 pruned. [I 2021-12-17 20:42:58,679] Trial 3248 pruned. [I 2021-12-17 20:43:00,598] Trial 3249 pruned. [I 2021-12-17 20:43:03,914] Trial 3250 pruned. [I 2021-12-17 20:43:07,168] Trial 3251 pruned. [I 2021-12-17 20:43:07,871] Trial 3252 pruned. [I 2021-12-17 20:43:09,807] Trial 3253 pruned. [I 2021-12-17 20:43:11,667] Trial 3254 pruned. [I 2021-12-17 20:43:13,457] Trial 3255 pruned. [I 2021-12-17 20:43:14,087] Trial 3256 pruned. [I 2021-12-17 20:43:16,095] Trial 3257 pruned. [I 2021-12-17 20:43:23,465] Trial 3258 pruned. [I 2021-12-17 20:43:24,830] Trial 3259 pruned. [I 2021-12-17 20:43:30,359] Trial 3260 pruned. [I 2021-12-17 20:43:33,576] Trial 3261 pruned. 
[I 2021-12-17 20:43:35,456] Trial 3262 pruned. [I 2021-12-17 20:43:37,422] Trial 3263 pruned. [I 2021-12-17 20:43:39,050] Trial 3264 pruned. [I 2021-12-17 20:43:43,122] Trial 3265 pruned. [I 2021-12-17 20:43:46,832] Trial 3266 pruned. [I 2021-12-17 20:43:49,022] Trial 3267 pruned. [I 2021-12-17 20:43:50,976] Trial 3268 pruned. [I 2021-12-17 20:43:52,240] Trial 3269 pruned. [I 2021-12-17 20:43:55,800] Trial 3270 pruned. [I 2021-12-17 20:48:10,572] Trial 3271 finished with value: 158.00619506835938 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 714, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 3271 with value: 158.00619506835938. [I 2021-12-17 20:48:18,488] Trial 3272 pruned. [I 2021-12-17 20:48:23,801] Trial 3273 pruned. [I 2021-12-17 20:48:26,685] Trial 3274 pruned. [I 2021-12-17 20:53:25,156] Trial 3275 finished with value: 178.9312286376953 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 834, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 3271 with value: 158.00619506835938. [I 2021-12-17 20:53:27,602] Trial 3276 pruned. [I 2021-12-17 20:53:30,625] Trial 3277 pruned. [I 2021-12-17 20:53:36,017] Trial 3278 pruned. [I 2021-12-17 20:53:39,006] Trial 3279 pruned. [I 2021-12-17 20:53:42,766] Trial 3280 pruned. [I 2021-12-17 20:53:43,777] Trial 3281 pruned. [I 2021-12-17 20:53:44,596] Trial 3282 pruned. [I 2021-12-17 20:53:49,909] Trial 3283 pruned. [I 2021-12-17 20:53:52,652] Trial 3284 pruned. [I 2021-12-17 20:53:58,242] Trial 3285 pruned. [I 2021-12-17 20:54:05,990] Trial 3286 pruned. [I 2021-12-17 20:54:09,364] Trial 3287 pruned. 
[I 2021-12-17 20:54:19,847] Trial 3288 pruned. [I 2021-12-17 20:54:22,660] Trial 3289 pruned. [I 2021-12-17 20:54:28,243] Trial 3290 pruned. [I 2021-12-17 20:54:31,241] Trial 3291 pruned. [I 2021-12-17 20:54:33,914] Trial 3292 pruned. [I 2021-12-17 20:54:36,854] Trial 3293 pruned. [I 2021-12-17 20:54:38,436] Trial 3294 pruned. [I 2021-12-17 20:54:41,204] Trial 3295 pruned. [I 2021-12-17 20:54:46,914] Trial 3296 pruned. [I 2021-12-17 20:54:51,543] Trial 3297 pruned. [I 2021-12-17 20:54:56,531] Trial 3298 pruned. [I 2021-12-17 20:55:02,276] Trial 3299 pruned. [I 2021-12-17 20:55:05,044] Trial 3300 pruned. [I 2021-12-17 20:55:07,170] Trial 3301 pruned. [I 2021-12-17 20:55:08,167] Trial 3302 pruned. [I 2021-12-17 20:55:10,938] Trial 3303 pruned. [I 2021-12-17 20:55:13,778] Trial 3304 pruned. [I 2021-12-17 20:55:14,673] Trial 3305 pruned. [I 2021-12-17 20:55:17,536] Trial 3306 pruned. [I 2021-12-17 20:55:23,197] Trial 3307 pruned. [I 2021-12-17 20:55:26,143] Trial 3308 pruned. [I 2021-12-17 20:55:28,929] Trial 3309 pruned. [I 2021-12-17 20:55:31,520] Trial 3310 pruned. [I 2021-12-17 20:55:34,519] Trial 3311 pruned. [I 2021-12-17 20:55:39,810] Trial 3312 pruned. [I 2021-12-17 20:55:42,999] Trial 3313 pruned. [I 2021-12-17 20:55:48,063] Trial 3314 pruned. [I 2021-12-17 20:55:51,015] Trial 3315 pruned. [I 2021-12-17 20:55:53,690] Trial 3316 pruned. [I 2021-12-17 20:56:14,872] Trial 3317 pruned. [I 2021-12-17 20:56:18,009] Trial 3318 pruned. [I 2021-12-17 20:56:20,750] Trial 3319 pruned. [I 2021-12-17 20:56:24,617] Trial 3320 pruned. [I 2021-12-17 20:56:27,455] Trial 3321 pruned. [I 2021-12-17 20:56:29,736] Trial 3322 pruned. [I 2021-12-17 20:56:32,786] Trial 3323 pruned. [I 2021-12-17 20:56:35,481] Trial 3324 pruned. [I 2021-12-17 20:56:38,231] Trial 3325 pruned. [I 2021-12-17 20:56:39,202] Trial 3326 pruned. [I 2021-12-17 20:56:44,307] Trial 3327 pruned. [I 2021-12-17 20:56:47,267] Trial 3328 pruned. [I 2021-12-17 20:56:48,064] Trial 3329 pruned. 
[I 2021-12-17 20:56:51,206] Trial 3330 pruned. [I 2021-12-17 20:56:59,298] Trial 3331 pruned. [I 2021-12-17 20:57:05,064] Trial 3332 pruned. [I 2021-12-17 20:57:07,812] Trial 3333 pruned. [I 2021-12-17 20:57:10,576] Trial 3334 pruned. [I 2021-12-17 20:57:16,524] Trial 3335 pruned. [I 2021-12-17 20:57:22,392] Trial 3336 pruned. [I 2021-12-17 20:57:24,948] Trial 3337 pruned. [I 2021-12-17 20:57:27,701] Trial 3338 pruned. [I 2021-12-17 20:57:30,572] Trial 3339 pruned. [I 2021-12-17 20:57:32,066] Trial 3340 pruned. [I 2021-12-17 20:57:34,670] Trial 3341 pruned. [I 2021-12-17 20:57:36,034] Trial 3342 pruned. [I 2021-12-17 20:57:39,624] Trial 3343 pruned. [I 2021-12-17 20:57:41,850] Trial 3344 pruned. [I 2021-12-17 20:57:43,926] Trial 3345 pruned. [I 2021-12-17 20:57:46,720] Trial 3346 pruned. [I 2021-12-17 20:57:53,632] Trial 3347 pruned. [I 2021-12-17 20:57:57,042] Trial 3348 pruned. [I 2021-12-17 20:57:58,106] Trial 3349 pruned. [I 2021-12-17 20:58:00,122] Trial 3350 pruned. [I 2021-12-17 20:58:03,093] Trial 3351 pruned. [I 2021-12-17 20:58:18,507] Trial 3352 pruned. [I 2021-12-17 20:58:22,302] Trial 3353 pruned. [I 2021-12-17 20:58:25,378] Trial 3354 pruned. [I 2021-12-17 20:58:26,075] Trial 3355 pruned. [I 2021-12-17 20:58:28,056] Trial 3356 pruned. [I 2021-12-17 20:58:34,124] Trial 3357 pruned. [I 2021-12-17 20:58:37,622] Trial 3358 pruned. [I 2021-12-17 20:58:40,994] Trial 3359 pruned. [I 2021-12-17 20:58:43,742] Trial 3360 pruned. [I 2021-12-17 20:58:46,017] Trial 3361 pruned. [I 2021-12-17 20:58:49,833] Trial 3362 pruned. [I 2021-12-17 20:58:51,844] Trial 3363 pruned. [I 2021-12-17 20:58:53,358] Trial 3364 pruned. [I 2021-12-17 20:58:55,746] Trial 3365 pruned. [I 2021-12-17 20:58:56,909] Trial 3366 pruned. [I 2021-12-17 20:58:58,823] Trial 3367 pruned. [I 2021-12-17 20:59:01,735] Trial 3368 pruned. [I 2021-12-17 20:59:04,054] Trial 3369 pruned. [I 2021-12-17 20:59:06,587] Trial 3370 pruned. [I 2021-12-17 20:59:10,428] Trial 3371 pruned. 
[I 2021-12-17 20:59:11,949] Trial 3372 pruned. [I 2021-12-17 20:59:12,776] Trial 3373 pruned. [I 2021-12-17 20:59:14,021] Trial 3374 pruned. [I 2021-12-17 20:59:16,199] Trial 3375 pruned. [I 2021-12-17 20:59:18,539] Trial 3376 pruned. [I 2021-12-17 20:59:19,701] Trial 3377 pruned. [I 2021-12-17 20:59:20,334] Trial 3378 pruned. [I 2021-12-17 20:59:22,046] Trial 3379 pruned. [I 2021-12-17 20:59:27,913] Trial 3380 pruned. [I 2021-12-17 20:59:30,564] Trial 3381 pruned. [I 2021-12-17 20:59:32,505] Trial 3382 pruned. [I 2021-12-17 20:59:35,471] Trial 3383 pruned. [I 2021-12-17 20:59:38,066] Trial 3384 pruned. [I 2021-12-17 20:59:41,373] Trial 3385 pruned. [I 2021-12-17 20:59:43,454] Trial 3386 pruned. [I 2021-12-17 20:59:45,248] Trial 3387 pruned. [I 2021-12-17 20:59:47,312] Trial 3388 pruned. [I 2021-12-17 20:59:50,922] Trial 3389 pruned. [I 2021-12-17 20:59:51,631] Trial 3390 pruned. [I 2021-12-17 20:59:53,615] Trial 3391 pruned. [I 2021-12-17 20:59:55,879] Trial 3392 pruned. [I 2021-12-17 21:00:03,174] Trial 3393 pruned. [I 2021-12-17 21:00:07,061] Trial 3394 pruned. [I 2021-12-17 21:00:08,998] Trial 3395 pruned. [I 2021-12-17 21:00:10,676] Trial 3396 pruned. [I 2021-12-17 21:00:11,399] Trial 3397 pruned. [I 2021-12-17 21:00:16,601] Trial 3398 pruned. [I 2021-12-17 21:00:21,992] Trial 3399 pruned. [I 2021-12-17 21:00:25,407] Trial 3400 pruned. [I 2021-12-17 21:00:26,062] Trial 3401 pruned. [I 2021-12-17 21:00:27,772] Trial 3402 pruned. [I 2021-12-17 21:00:31,110] Trial 3403 pruned. [I 2021-12-17 21:00:33,031] Trial 3404 pruned. [I 2021-12-17 21:00:34,545] Trial 3405 pruned. [I 2021-12-17 21:00:41,142] Trial 3406 pruned. [I 2021-12-17 21:00:42,730] Trial 3407 pruned. [I 2021-12-17 21:00:44,523] Trial 3408 pruned. [I 2021-12-17 21:00:50,329] Trial 3409 pruned. [I 2021-12-17 21:00:55,168] Trial 3410 pruned. [I 2021-12-17 21:00:59,645] Trial 3411 pruned. [I 2021-12-17 21:01:00,724] Trial 3412 pruned. [I 2021-12-17 21:01:02,251] Trial 3413 pruned. 
[I 2021-12-17 21:01:05,844] Trial 3414 pruned. [I 2021-12-17 21:01:07,705] Trial 3415 pruned. [I 2021-12-17 21:01:10,629] Trial 3416 pruned. [I 2021-12-17 21:01:12,753] Trial 3417 pruned. [I 2021-12-17 21:01:14,516] Trial 3418 pruned. [I 2021-12-17 21:01:17,900] Trial 3419 pruned. [I 2021-12-17 21:01:20,553] Trial 3420 pruned. [I 2021-12-17 21:01:21,356] Trial 3421 pruned. [I 2021-12-17 21:01:23,230] Trial 3422 pruned. [I 2021-12-17 21:01:25,047] Trial 3423 pruned. [I 2021-12-17 21:01:26,909] Trial 3424 pruned. [I 2021-12-17 21:01:29,552] Trial 3425 pruned. [I 2021-12-17 21:01:30,305] Trial 3426 pruned. [I 2021-12-17 21:01:32,558] Trial 3427 pruned. [I 2021-12-17 21:01:34,655] Trial 3428 pruned. [I 2021-12-17 21:01:38,351] Trial 3429 pruned. [I 2021-12-17 21:01:40,456] Trial 3430 pruned. [I 2021-12-17 21:01:43,975] Trial 3431 pruned. [I 2021-12-17 21:01:45,205] Trial 3432 pruned. [I 2021-12-17 21:01:46,873] Trial 3433 pruned. [I 2021-12-17 21:01:50,325] Trial 3434 pruned. [I 2021-12-17 21:01:52,050] Trial 3435 pruned. [I 2021-12-17 21:01:53,782] Trial 3436 pruned. [I 2021-12-17 21:01:55,506] Trial 3437 pruned. [I 2021-12-17 21:01:57,200] Trial 3438 pruned. [I 2021-12-17 21:02:00,833] Trial 3439 pruned. [I 2021-12-17 21:02:01,902] Trial 3440 pruned. [I 2021-12-17 21:02:03,889] Trial 3441 pruned. [I 2021-12-17 21:02:05,766] Trial 3442 pruned. [I 2021-12-17 21:02:07,761] Trial 3443 pruned. [I 2021-12-17 21:02:15,595] Trial 3444 pruned. [I 2021-12-17 21:02:16,715] Trial 3445 pruned. [I 2021-12-17 21:02:21,164] Trial 3446 pruned. [I 2021-12-17 21:02:22,955] Trial 3447 pruned. [I 2021-12-17 21:02:24,624] Trial 3448 pruned. [I 2021-12-17 21:02:26,297] Trial 3449 pruned. [I 2021-12-17 21:02:26,937] Trial 3450 pruned. [I 2021-12-17 21:02:28,952] Trial 3451 pruned. 
[I 2021-12-17 21:08:08,836] Trial 3452 finished with value: 168.9630126953125 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 938, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 3271 with value: 158.00619506835938. [I 2021-12-17 21:08:11,051] Trial 3453 pruned. [I 2021-12-17 21:08:12,802] Trial 3454 pruned. [I 2021-12-17 21:08:13,821] Trial 3455 pruned. [I 2021-12-17 21:08:15,627] Trial 3456 pruned. [I 2021-12-17 21:08:17,762] Trial 3457 pruned. [I 2021-12-17 21:08:23,188] Trial 3458 pruned. [I 2021-12-17 21:08:25,080] Trial 3459 pruned. [I 2021-12-17 21:08:27,045] Trial 3460 pruned. [I 2021-12-17 21:08:34,236] Trial 3461 pruned. [I 2021-12-17 21:08:35,389] Trial 3462 pruned. [I 2021-12-17 21:08:36,901] Trial 3463 pruned. [I 2021-12-17 21:08:40,466] Trial 3464 pruned. [I 2021-12-17 21:08:42,320] Trial 3465 pruned. [I 2021-12-17 21:08:49,763] Trial 3466 pruned. [I 2021-12-17 21:08:52,904] Trial 3467 pruned. [I 2021-12-17 21:08:59,948] Trial 3468 pruned. [I 2021-12-17 21:09:02,495] Trial 3469 pruned. [I 2021-12-17 21:09:03,155] Trial 3470 pruned. [I 2021-12-17 21:09:04,214] Trial 3471 pruned. [I 2021-12-17 21:09:07,744] Trial 3472 pruned. [I 2021-12-17 21:09:09,456] Trial 3473 pruned. [I 2021-12-17 21:09:10,380] Trial 3474 pruned. [I 2021-12-17 21:09:12,827] Trial 3475 pruned. [I 2021-12-17 21:09:15,047] Trial 3476 pruned. [I 2021-12-17 21:09:16,958] Trial 3477 pruned. [I 2021-12-17 21:09:19,045] Trial 3478 pruned. [I 2021-12-17 21:09:22,884] Trial 3479 pruned. [I 2021-12-17 21:09:24,698] Trial 3480 pruned. [I 2021-12-17 21:09:26,428] Trial 3481 pruned. [I 2021-12-17 21:09:28,479] Trial 3482 pruned. [I 2021-12-17 21:09:31,517] Trial 3483 pruned. [I 2021-12-17 21:09:34,724] Trial 3484 pruned. [I 2021-12-17 21:09:36,511] Trial 3485 pruned. 
[I 2021-12-17 21:09:38,437] Trial 3486 pruned. [I 2021-12-17 21:09:39,636] Trial 3487 pruned. [I 2021-12-17 21:09:41,690] Trial 3488 pruned. [I 2021-12-17 21:09:53,657] Trial 3489 pruned. [I 2021-12-17 21:09:57,132] Trial 3490 pruned. [I 2021-12-17 21:10:02,177] Trial 3491 pruned. [I 2021-12-17 21:10:03,822] Trial 3492 pruned. [I 2021-12-17 21:10:05,059] Trial 3493 pruned. [I 2021-12-17 21:10:07,030] Trial 3494 pruned. [I 2021-12-17 21:10:13,613] Trial 3495 pruned. [I 2021-12-17 21:10:15,272] Trial 3496 pruned. [I 2021-12-17 21:10:16,499] Trial 3497 pruned. [I 2021-12-17 21:10:18,618] Trial 3498 pruned. [I 2021-12-17 21:10:19,743] Trial 3499 pruned. [I 2021-12-17 21:10:20,553] Trial 3500 pruned. [I 2021-12-17 21:10:24,055] Trial 3501 pruned. [I 2021-12-17 21:10:29,243] Trial 3502 pruned. [I 2021-12-17 21:10:31,211] Trial 3503 pruned. [I 2021-12-17 21:10:32,882] Trial 3504 pruned. [I 2021-12-17 21:10:35,788] Trial 3505 pruned. [I 2021-12-17 21:10:38,889] Trial 3506 pruned. [I 2021-12-17 21:10:40,832] Trial 3507 pruned. [I 2021-12-17 21:10:42,695] Trial 3508 pruned. [I 2021-12-17 21:10:44,996] Trial 3509 pruned. [I 2021-12-17 21:10:47,776] Trial 3510 pruned. [I 2021-12-17 21:10:54,081] Trial 3511 pruned. [I 2021-12-17 21:10:55,923] Trial 3512 pruned. [I 2021-12-17 21:10:59,500] Trial 3513 pruned. [I 2021-12-17 21:11:01,123] Trial 3514 pruned. [I 2021-12-17 21:11:03,601] Trial 3515 pruned. [I 2021-12-17 21:11:05,123] Trial 3516 pruned. [I 2021-12-17 21:11:06,047] Trial 3517 pruned. [I 2021-12-17 21:11:18,616] Trial 3518 pruned. [I 2021-12-17 21:11:20,382] Trial 3519 pruned. [I 2021-12-17 21:11:22,458] Trial 3520 pruned. [I 2021-12-17 21:11:24,343] Trial 3521 pruned. [I 2021-12-17 21:11:27,989] Trial 3522 pruned. [I 2021-12-17 21:11:31,441] Trial 3523 pruned. [I 2021-12-17 21:11:32,077] Trial 3524 pruned. [I 2021-12-17 21:11:36,218] Trial 3525 pruned. [I 2021-12-17 21:11:37,988] Trial 3526 pruned. [I 2021-12-17 21:11:39,889] Trial 3527 pruned. 
[I 2021-12-17 21:11:43,632] Trial 3528 pruned. [I 2021-12-17 21:11:48,064] Trial 3529 pruned. [I 2021-12-17 21:11:49,704] Trial 3530 pruned. [I 2021-12-17 21:11:51,399] Trial 3531 pruned. [I 2021-12-17 21:11:53,209] Trial 3532 pruned. [I 2021-12-17 21:11:56,359] Trial 3533 pruned. [I 2021-12-17 21:11:58,221] Trial 3534 pruned. [I 2021-12-17 21:12:01,705] Trial 3535 pruned. [I 2021-12-17 21:12:03,221] Trial 3536 pruned. [I 2021-12-17 21:12:06,369] Trial 3537 pruned. [I 2021-12-17 21:12:08,319] Trial 3538 pruned. [I 2021-12-17 21:12:15,989] Trial 3539 pruned. [I 2021-12-17 21:12:18,226] Trial 3540 pruned. [I 2021-12-17 21:12:19,074] Trial 3541 pruned. [I 2021-12-17 21:12:20,721] Trial 3542 pruned. [I 2021-12-17 21:12:22,578] Trial 3543 pruned. [I 2021-12-17 21:12:25,565] Trial 3544 pruned. [I 2021-12-17 21:12:28,763] Trial 3545 pruned. [I 2021-12-17 21:12:29,418] Trial 3546 pruned. [I 2021-12-17 21:12:33,229] Trial 3547 pruned. [I 2021-12-17 21:12:35,861] Trial 3548 pruned. [I 2021-12-17 21:12:37,847] Trial 3549 pruned. [I 2021-12-17 21:12:40,752] Trial 3550 pruned. [I 2021-12-17 21:12:42,979] Trial 3551 pruned. [I 2021-12-17 21:12:44,737] Trial 3552 pruned. [I 2021-12-17 21:12:48,196] Trial 3553 pruned. [I 2021-12-17 21:12:50,114] Trial 3554 pruned. [I 2021-12-17 21:12:52,166] Trial 3555 pruned. [I 2021-12-17 21:12:56,156] Trial 3556 pruned. [I 2021-12-17 21:13:00,176] Trial 3557 pruned. [I 2021-12-17 21:13:01,261] Trial 3558 pruned. [I 2021-12-17 21:13:03,836] Trial 3559 pruned. [I 2021-12-17 21:13:05,760] Trial 3560 pruned. [I 2021-12-17 21:13:12,167] Trial 3561 pruned. [I 2021-12-17 21:13:13,875] Trial 3562 pruned. [I 2021-12-17 21:13:15,420] Trial 3563 pruned. [I 2021-12-17 21:13:16,425] Trial 3564 pruned. [I 2021-12-17 21:13:19,159] Trial 3565 pruned. [I 2021-12-17 21:13:20,348] Trial 3566 pruned. [I 2021-12-17 21:13:22,423] Trial 3567 pruned. [I 2021-12-17 21:13:26,632] Trial 3568 pruned. [I 2021-12-17 21:13:28,524] Trial 3569 pruned. 
[I 2021-12-17 21:13:30,541] Trial 3570 pruned. [I 2021-12-17 21:13:32,046] Trial 3571 pruned. [I 2021-12-17 21:13:33,904] Trial 3572 pruned. [I 2021-12-17 21:13:36,017] Trial 3573 pruned. [I 2021-12-17 21:13:39,525] Trial 3574 pruned. [I 2021-12-17 21:13:42,366] Trial 3575 pruned. [I 2021-12-17 21:13:46,053] Trial 3576 pruned. [I 2021-12-17 21:13:47,840] Trial 3577 pruned. [I 2021-12-17 21:13:51,264] Trial 3578 pruned. [I 2021-12-17 21:13:53,169] Trial 3579 pruned. [I 2021-12-17 21:13:54,947] Trial 3580 pruned. [I 2021-12-17 21:13:57,283] Trial 3581 pruned. [I 2021-12-17 21:14:04,484] Trial 3582 pruned. [I 2021-12-17 21:14:06,134] Trial 3583 pruned. [I 2021-12-17 21:14:09,164] Trial 3584 pruned. [I 2021-12-17 21:14:10,314] Trial 3585 pruned. [I 2021-12-17 21:14:16,064] Trial 3586 pruned. [I 2021-12-17 21:14:18,122] Trial 3587 pruned. [I 2021-12-17 21:14:21,675] Trial 3588 pruned. [I 2021-12-17 21:14:25,163] Trial 3589 pruned. [I 2021-12-17 21:14:25,944] Trial 3590 pruned. [I 2021-12-17 21:14:31,893] Trial 3591 pruned. [I 2021-12-17 21:14:35,086] Trial 3592 pruned. [I 2021-12-17 21:14:38,671] Trial 3593 pruned. [I 2021-12-17 21:14:39,465] Trial 3594 pruned. [I 2021-12-17 21:14:42,954] Trial 3595 pruned. [I 2021-12-17 21:14:48,543] Trial 3596 pruned. [I 2021-12-17 21:14:50,100] Trial 3597 pruned. [I 2021-12-17 21:14:55,557] Trial 3598 pruned. [I 2021-12-17 21:15:04,905] Trial 3599 pruned. [I 2021-12-17 21:15:06,873] Trial 3600 pruned. [I 2021-12-17 21:15:08,196] Trial 3601 pruned. [I 2021-12-17 21:15:11,325] Trial 3602 pruned. [I 2021-12-17 21:15:16,538] Trial 3603 pruned. [I 2021-12-17 21:15:18,343] Trial 3604 pruned. [I 2021-12-17 21:15:21,870] Trial 3605 pruned. [I 2021-12-17 21:15:25,869] Trial 3606 pruned. [I 2021-12-17 21:15:26,642] Trial 3607 pruned. [I 2021-12-17 21:15:28,689] Trial 3608 pruned. [I 2021-12-17 21:15:30,493] Trial 3609 pruned. [I 2021-12-17 21:15:33,557] Trial 3610 pruned. [I 2021-12-17 21:15:38,500] Trial 3611 pruned. 
[I 2021-12-17 21:15:40,361] Trial 3612 pruned. [I 2021-12-17 21:15:42,324] Trial 3613 pruned. [I 2021-12-17 21:15:43,135] Trial 3614 pruned. [I 2021-12-17 21:15:48,952] Trial 3615 pruned. [I 2021-12-17 21:15:50,678] Trial 3616 pruned. [I 2021-12-17 21:15:52,823] Trial 3617 pruned. [I 2021-12-17 21:15:54,853] Trial 3618 pruned. [I 2021-12-17 21:15:55,399] Trial 3619 pruned. [I 2021-12-17 21:15:57,259] Trial 3620 pruned. [I 2021-12-17 21:16:00,793] Trial 3621 pruned. [I 2021-12-17 21:16:02,604] Trial 3622 pruned. [I 2021-12-17 21:16:04,097] Trial 3623 pruned. [I 2021-12-17 21:16:06,237] Trial 3624 pruned. [I 2021-12-17 21:16:10,980] Trial 3625 pruned. [I 2021-12-17 21:16:14,148] Trial 3626 pruned. [I 2021-12-17 21:16:18,521] Trial 3627 pruned. [I 2021-12-17 21:16:20,252] Trial 3628 pruned. [I 2021-12-17 21:16:22,921] Trial 3629 pruned. [I 2021-12-17 21:16:24,828] Trial 3630 pruned. [I 2021-12-17 21:16:26,053] Trial 3631 pruned. [I 2021-12-17 21:16:27,913] Trial 3632 pruned. [I 2021-12-17 21:16:31,519] Trial 3633 pruned. [I 2021-12-17 21:16:33,356] Trial 3634 pruned. [I 2021-12-17 21:16:35,192] Trial 3635 pruned. [I 2021-12-17 21:16:37,153] Trial 3636 pruned. [I 2021-12-17 21:16:40,678] Trial 3637 pruned. [I 2021-12-17 21:16:41,837] Trial 3638 pruned. [I 2021-12-17 21:16:43,594] Trial 3639 pruned. [I 2021-12-17 21:16:45,270] Trial 3640 pruned. [I 2021-12-17 21:16:49,514] Trial 3641 pruned. [I 2021-12-17 21:16:50,211] Trial 3642 pruned. [I 2021-12-17 21:16:54,912] Trial 3643 pruned. [I 2021-12-17 21:16:58,726] Trial 3644 pruned. [I 2021-12-17 21:17:03,634] Trial 3645 pruned. [I 2021-12-17 21:17:06,344] Trial 3646 pruned. [I 2021-12-17 21:17:07,905] Trial 3647 pruned. [I 2021-12-17 21:17:11,467] Trial 3648 pruned. [I 2021-12-17 21:17:13,313] Trial 3649 pruned. [I 2021-12-17 21:17:14,987] Trial 3650 pruned. [I 2021-12-17 21:17:20,894] Trial 3651 pruned. [I 2021-12-17 21:17:22,811] Trial 3652 pruned. [I 2021-12-17 21:17:26,224] Trial 3653 pruned. 
[I 2021-12-17 21:17:27,946] Trial 3654 pruned. [I 2021-12-17 21:17:29,021] Trial 3655 pruned. [I 2021-12-17 21:17:30,883] Trial 3656 pruned. [I 2021-12-17 21:17:34,650] Trial 3657 pruned. [I 2021-12-17 21:17:38,166] Trial 3658 pruned. [I 2021-12-17 21:17:41,544] Trial 3659 pruned. [I 2021-12-17 21:17:45,089] Trial 3660 pruned. [I 2021-12-17 21:17:46,898] Trial 3661 pruned. [I 2021-12-17 21:17:47,849] Trial 3662 pruned. [I 2021-12-17 21:17:51,326] Trial 3663 pruned. [I 2021-12-17 21:17:54,542] Trial 3664 pruned. [I 2021-12-17 21:17:57,930] Trial 3665 pruned. [I 2021-12-17 21:17:59,652] Trial 3666 pruned. [I 2021-12-17 21:18:13,405] Trial 3667 pruned. [I 2021-12-17 21:18:14,764] Trial 3668 pruned. [I 2021-12-17 21:18:15,441] Trial 3669 pruned. [I 2021-12-17 21:18:18,903] Trial 3670 pruned. [I 2021-12-17 21:18:20,959] Trial 3671 pruned. [I 2021-12-17 21:18:22,048] Trial 3672 pruned. [I 2021-12-17 21:18:23,819] Trial 3673 pruned. [I 2021-12-17 21:18:25,781] Trial 3674 pruned. [I 2021-12-17 21:18:29,763] Trial 3675 pruned. [I 2021-12-17 21:18:31,141] Trial 3676 pruned. [I 2021-12-17 21:18:34,656] Trial 3677 pruned. [I 2021-12-17 21:18:37,949] Trial 3678 pruned. [I 2021-12-17 21:18:39,093] Trial 3679 pruned. [I 2021-12-17 21:18:42,621] Trial 3680 pruned. [I 2021-12-17 21:18:48,044] Trial 3681 pruned. [I 2021-12-17 21:18:52,000] Trial 3682 pruned. [I 2021-12-17 21:18:53,656] Trial 3683 pruned. [I 2021-12-17 21:18:55,780] Trial 3684 pruned. [I 2021-12-17 21:18:57,101] Trial 3685 pruned. [I 2021-12-17 21:18:58,326] Trial 3686 pruned. [I 2021-12-17 21:19:02,148] Trial 3687 pruned. [I 2021-12-17 21:19:04,201] Trial 3688 pruned. [I 2021-12-17 21:19:09,866] Trial 3689 pruned. [I 2021-12-17 21:19:10,506] Trial 3690 pruned. [I 2021-12-17 21:19:12,181] Trial 3691 pruned. [I 2021-12-17 21:19:15,917] Trial 3692 pruned. [I 2021-12-17 21:19:17,756] Trial 3693 pruned. [I 2021-12-17 21:19:18,942] Trial 3694 pruned. [I 2021-12-17 21:19:20,054] Trial 3695 pruned. 
[I 2021-12-17 21:19:22,028] Trial 3696 pruned. [I 2021-12-17 21:19:25,980] Trial 3697 pruned. [I 2021-12-17 21:19:29,452] Trial 3698 pruned. [I 2021-12-17 21:19:32,961] Trial 3699 pruned. [I 2021-12-17 21:19:34,709] Trial 3700 pruned. [I 2021-12-17 21:19:41,038] Trial 3701 pruned. [I 2021-12-17 21:19:44,791] Trial 3702 pruned. [I 2021-12-17 21:19:46,713] Trial 3703 pruned. [I 2021-12-17 21:19:48,415] Trial 3704 pruned. [I 2021-12-17 21:19:50,342] Trial 3705 pruned. [I 2021-12-17 21:19:52,085] Trial 3706 pruned. [I 2021-12-17 21:19:55,776] Trial 3707 pruned. [I 2021-12-17 21:19:59,931] Trial 3708 pruned. [I 2021-12-17 21:20:07,066] Trial 3709 pruned. [I 2021-12-17 21:20:07,966] Trial 3710 pruned. [I 2021-12-17 21:20:09,276] Trial 3711 pruned. [I 2021-12-17 21:20:11,138] Trial 3712 pruned. [I 2021-12-17 21:20:14,274] Trial 3713 pruned. [I 2021-12-17 21:20:14,968] Trial 3714 pruned. [I 2021-12-17 21:20:15,978] Trial 3715 pruned. [I 2021-12-17 21:20:17,125] Trial 3716 pruned. [I 2021-12-17 21:20:19,017] Trial 3717 pruned. [I 2021-12-17 21:20:21,846] Trial 3718 pruned. [I 2021-12-17 21:20:25,991] Trial 3719 pruned. [I 2021-12-17 21:20:27,833] Trial 3720 pruned. [I 2021-12-17 21:20:31,393] Trial 3721 pruned. [I 2021-12-17 21:22:53,322] Trial 3722 finished with value: 163.6935577392578 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 308, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 3271 with value: 158.00619506835938. [I 2021-12-17 21:22:55,010] Trial 3723 pruned. [I 2021-12-17 21:22:57,962] Trial 3724 pruned. [I 2021-12-17 21:23:00,860] Trial 3725 pruned. [I 2021-12-17 21:23:02,835] Trial 3726 pruned. [I 2021-12-17 21:23:05,927] Trial 3727 pruned. [I 2021-12-17 21:23:07,585] Trial 3728 pruned. [I 2021-12-17 21:23:09,259] Trial 3729 pruned. 
[I 2021-12-17 21:23:10,962] Trial 3730 pruned. [I 2021-12-17 21:23:12,588] Trial 3731 pruned. [I 2021-12-17 21:23:14,346] Trial 3732 pruned. [I 2021-12-17 21:23:18,647] Trial 3733 pruned. [I 2021-12-17 21:23:20,325] Trial 3734 pruned. [I 2021-12-17 21:23:21,086] Trial 3735 pruned. [I 2021-12-17 21:23:23,838] Trial 3736 pruned. [I 2021-12-17 21:23:25,652] Trial 3737 pruned. [I 2021-12-17 21:23:27,341] Trial 3738 pruned. [I 2021-12-17 21:23:28,016] Trial 3739 pruned. [I 2021-12-17 21:23:29,107] Trial 3740 pruned. [I 2021-12-17 21:23:31,960] Trial 3741 pruned. [I 2021-12-17 21:23:33,688] Trial 3742 pruned. [I 2021-12-17 21:23:36,718] Trial 3743 pruned. [I 2021-12-17 21:23:38,410] Trial 3744 pruned. [I 2021-12-17 21:23:39,992] Trial 3745 pruned. [I 2021-12-17 21:23:43,002] Trial 3746 pruned. [I 2021-12-17 21:23:44,691] Trial 3747 pruned. [I 2021-12-17 21:23:46,720] Trial 3748 pruned. [I 2021-12-17 21:23:50,012] Trial 3749 pruned. [I 2021-12-17 21:23:51,817] Trial 3750 pruned. [I 2021-12-17 21:23:53,620] Trial 3751 pruned. [I 2021-12-17 21:23:54,776] Trial 3752 pruned. [I 2021-12-17 21:23:57,565] Trial 3753 pruned. [I 2021-12-17 21:24:00,824] Trial 3754 pruned. [I 2021-12-17 21:24:02,460] Trial 3755 pruned. [I 2021-12-17 21:24:04,915] Trial 3756 pruned. [I 2021-12-17 21:24:06,670] Trial 3757 pruned. [I 2021-12-17 21:24:09,787] Trial 3758 pruned. [I 2021-12-17 21:24:10,593] Trial 3759 pruned. [I 2021-12-17 21:24:11,684] Trial 3760 pruned. [I 2021-12-17 21:24:13,329] Trial 3761 pruned. [I 2021-12-17 21:24:15,107] Trial 3762 pruned. [I 2021-12-17 21:24:18,175] Trial 3763 pruned. [I 2021-12-17 21:24:19,966] Trial 3764 pruned. [I 2021-12-17 21:24:22,990] Trial 3765 pruned. [I 2021-12-17 21:24:23,633] Trial 3766 pruned. [I 2021-12-17 21:24:25,295] Trial 3767 pruned. [I 2021-12-17 21:24:28,460] Trial 3768 pruned. [I 2021-12-17 21:24:30,197] Trial 3769 pruned. [I 2021-12-17 21:24:32,302] Trial 3770 pruned. [I 2021-12-17 21:24:36,674] Trial 3771 pruned. 
[I 2021-12-17 21:24:41,968] Trial 3772 pruned. [I 2021-12-17 21:24:43,699] Trial 3773 pruned. [I 2021-12-17 21:24:46,629] Trial 3774 pruned. [I 2021-12-17 21:24:48,377] Trial 3775 pruned. [I 2021-12-17 21:24:49,248] Trial 3776 pruned. [I 2021-12-17 21:24:51,050] Trial 3777 pruned. [I 2021-12-17 21:24:53,950] Trial 3778 pruned. [I 2021-12-17 21:24:57,218] Trial 3779 pruned. [I 2021-12-17 21:24:58,959] Trial 3780 pruned. [I 2021-12-17 21:25:00,773] Trial 3781 pruned. [I 2021-12-17 21:25:02,627] Trial 3782 pruned. [I 2021-12-17 21:25:03,756] Trial 3783 pruned. [I 2021-12-17 21:25:04,554] Trial 3784 pruned. [I 2021-12-17 21:25:09,782] Trial 3785 pruned. [I 2021-12-17 21:25:12,765] Trial 3786 pruned. [I 2021-12-17 21:25:14,534] Trial 3787 pruned. [I 2021-12-17 21:25:15,338] Trial 3788 pruned. [I 2021-12-17 21:25:16,535] Trial 3789 pruned. [I 2021-12-17 21:25:24,382] Trial 3790 pruned. [I 2021-12-17 21:25:26,387] Trial 3791 pruned. [I 2021-12-17 21:25:28,009] Trial 3792 pruned. [I 2021-12-17 21:25:29,768] Trial 3793 pruned. [I 2021-12-17 21:25:32,228] Trial 3794 pruned. [I 2021-12-17 21:25:34,029] Trial 3795 pruned. [I 2021-12-17 21:25:37,126] Trial 3796 pruned. [I 2021-12-17 21:25:38,985] Trial 3797 pruned. [I 2021-12-17 21:25:42,111] Trial 3798 pruned. [I 2021-12-17 21:25:44,374] Trial 3799 pruned. [I 2021-12-17 21:25:45,883] Trial 3800 pruned. [I 2021-12-17 21:25:49,218] Trial 3801 pruned. [I 2021-12-17 21:25:51,093] Trial 3802 pruned. [I 2021-12-17 21:25:54,441] Trial 3803 pruned. [I 2021-12-17 21:25:56,239] Trial 3804 pruned. [I 2021-12-17 21:25:57,899] Trial 3805 pruned. [I 2021-12-17 21:26:04,745] Trial 3806 pruned. [I 2021-12-17 21:26:05,633] Trial 3807 pruned. [I 2021-12-17 21:26:07,904] Trial 3808 pruned. [I 2021-12-17 21:26:10,282] Trial 3809 pruned. [I 2021-12-17 21:26:12,116] Trial 3810 pruned. [I 2021-12-17 21:26:12,665] Trial 3811 pruned. [I 2021-12-17 21:26:15,640] Trial 3812 pruned. [I 2021-12-17 21:26:17,711] Trial 3813 pruned. 
[I 2021-12-17 21:26:20,521] Trial 3814 pruned. [I 2021-12-17 21:26:22,165] Trial 3815 pruned. [I 2021-12-17 21:26:23,956] Trial 3816 pruned. [I 2021-12-17 21:26:29,995] Trial 3817 pruned. [I 2021-12-17 21:26:33,170] Trial 3818 pruned. [I 2021-12-17 21:26:34,986] Trial 3819 pruned. [I 2021-12-17 21:26:37,293] Trial 3820 pruned. [I 2021-12-17 21:26:40,675] Trial 3821 pruned. [I 2021-12-17 21:26:43,953] Trial 3822 pruned. [I 2021-12-17 21:26:47,171] Trial 3823 pruned. [I 2021-12-17 21:26:48,289] Trial 3824 pruned. [I 2021-12-17 21:26:50,083] Trial 3825 pruned. [I 2021-12-17 21:26:53,165] Trial 3826 pruned. [I 2021-12-17 21:26:54,784] Trial 3827 pruned. [I 2021-12-17 21:27:01,609] Trial 3828 pruned. [I 2021-12-17 21:27:02,635] Trial 3829 pruned. [I 2021-12-17 21:27:04,266] Trial 3830 pruned. [I 2021-12-17 21:27:05,642] Trial 3831 pruned. [I 2021-12-17 21:27:09,610] Trial 3832 pruned. [I 2021-12-17 21:27:19,424] Trial 3833 pruned. [I 2021-12-17 21:27:22,842] Trial 3834 pruned. [I 2021-12-17 21:27:25,754] Trial 3835 pruned. [I 2021-12-17 21:27:26,553] Trial 3836 pruned. [I 2021-12-17 21:27:30,080] Trial 3837 pruned. [I 2021-12-17 21:27:32,731] Trial 3838 pruned. [I 2021-12-17 21:27:34,469] Trial 3839 pruned. [I 2021-12-17 21:27:37,270] Trial 3840 pruned. [I 2021-12-17 21:27:38,968] Trial 3841 pruned. [I 2021-12-17 21:27:41,481] Trial 3842 pruned. [I 2021-12-17 21:27:48,454] Trial 3843 pruned. [I 2021-12-17 21:27:50,308] Trial 3844 pruned. [I 2021-12-17 21:27:53,710] Trial 3845 pruned. [I 2021-12-17 21:27:55,464] Trial 3846 pruned. [I 2021-12-17 21:27:56,990] Trial 3847 pruned. [I 2021-12-17 21:27:58,838] Trial 3848 pruned. [I 2021-12-17 21:28:00,119] Trial 3849 pruned. [I 2021-12-17 21:28:03,418] Trial 3850 pruned. [I 2021-12-17 21:28:05,170] Trial 3851 pruned. [I 2021-12-17 21:28:26,940] Trial 3852 pruned. [I 2021-12-17 21:28:28,212] Trial 3853 pruned. [I 2021-12-17 21:28:31,619] Trial 3854 pruned. [I 2021-12-17 21:28:34,411] Trial 3855 pruned. 
[I 2021-12-17 21:28:35,498] Trial 3856 pruned. [I 2021-12-17 21:28:39,412] Trial 3857 pruned. [I 2021-12-17 21:28:42,737] Trial 3858 pruned. [I 2021-12-17 21:28:44,520] Trial 3859 pruned. [I 2021-12-17 21:28:45,459] Trial 3860 pruned. [I 2021-12-17 21:28:48,855] Trial 3861 pruned. [I 2021-12-17 21:28:51,909] Trial 3862 pruned. [I 2021-12-17 21:28:55,266] Trial 3863 pruned. [I 2021-12-17 21:28:56,602] Trial 3864 pruned. [I 2021-12-17 21:28:58,255] Trial 3865 pruned. [I 2021-12-17 21:29:01,592] Trial 3866 pruned. [I 2021-12-17 21:29:03,322] Trial 3867 pruned. [I 2021-12-17 21:29:04,761] Trial 3868 pruned. [I 2021-12-17 21:29:07,120] Trial 3869 pruned. [I 2021-12-17 21:29:08,855] Trial 3870 pruned. [I 2021-12-17 21:29:10,420] Trial 3871 pruned. [I 2021-12-17 21:29:12,371] Trial 3872 pruned. [I 2021-12-17 21:29:15,670] Trial 3873 pruned. [I 2021-12-17 21:29:17,392] Trial 3874 pruned. [I 2021-12-17 21:29:19,604] Trial 3875 pruned. [I 2021-12-17 21:29:23,445] Trial 3876 pruned. [I 2021-12-17 21:29:26,825] Trial 3877 pruned. [I 2021-12-17 21:29:28,936] Trial 3878 pruned. [I 2021-12-17 21:29:31,949] Trial 3879 pruned. [I 2021-12-17 21:29:32,757] Trial 3880 pruned. [I 2021-12-17 21:29:36,089] Trial 3881 pruned. [I 2021-12-17 21:29:43,733] Trial 3882 pruned. [I 2021-12-17 21:29:44,586] Trial 3883 pruned. [I 2021-12-17 21:29:46,372] Trial 3884 pruned. [I 2021-12-17 21:29:50,237] Trial 3885 pruned. [I 2021-12-17 21:29:53,734] Trial 3886 pruned. [I 2021-12-17 21:29:56,228] Trial 3887 pruned. [I 2021-12-17 21:30:01,121] Trial 3888 pruned. [I 2021-12-17 21:30:04,649] Trial 3889 pruned. [I 2021-12-17 21:30:06,391] Trial 3890 pruned. [I 2021-12-17 21:30:07,517] Trial 3891 pruned. [I 2021-12-17 21:30:09,454] Trial 3892 pruned. [I 2021-12-17 21:30:15,867] Trial 3893 pruned. [I 2021-12-17 21:30:18,846] Trial 3894 pruned. [I 2021-12-17 21:30:20,688] Trial 3895 pruned. [I 2021-12-17 21:30:22,605] Trial 3896 pruned. [I 2021-12-17 21:30:24,368] Trial 3897 pruned. 
[I 2021-12-17 21:30:26,687] Trial 3898 pruned. [I 2021-12-17 21:30:30,178] Trial 3899 pruned. [I 2021-12-17 21:30:32,190] Trial 3900 pruned. [I 2021-12-17 21:30:34,029] Trial 3901 pruned. [I 2021-12-17 21:30:36,737] Trial 3902 pruned. [I 2021-12-17 21:30:37,533] Trial 3903 pruned. [I 2021-12-17 21:30:39,505] Trial 3904 pruned. [I 2021-12-17 21:30:42,487] Trial 3905 pruned. [I 2021-12-17 21:30:49,561] Trial 3906 pruned. [I 2021-12-17 21:30:50,136] Trial 3907 pruned. [I 2021-12-17 21:30:51,319] Trial 3908 pruned. [I 2021-12-17 21:30:55,482] Trial 3909 pruned. [I 2021-12-17 21:31:03,316] Trial 3910 pruned. [I 2021-12-17 21:31:05,145] Trial 3911 pruned. [I 2021-12-17 21:31:10,089] Trial 3912 pruned. [I 2021-12-17 21:31:12,228] Trial 3913 pruned. [I 2021-12-17 21:31:13,283] Trial 3914 pruned. [I 2021-12-17 21:31:16,850] Trial 3915 pruned. [I 2021-12-17 21:31:19,982] Trial 3916 pruned. [I 2021-12-17 21:31:22,889] Trial 3917 pruned. [I 2021-12-17 21:31:24,185] Trial 3918 pruned. [I 2021-12-17 21:31:25,981] Trial 3919 pruned. [I 2021-12-17 21:31:27,967] Trial 3920 pruned. [I 2021-12-17 21:31:31,571] Trial 3921 pruned. [I 2021-12-17 21:31:33,706] Trial 3922 pruned. [I 2021-12-17 21:31:38,164] Trial 3923 pruned. [I 2021-12-17 21:31:39,711] Trial 3924 pruned. [I 2021-12-17 21:31:41,607] Trial 3925 pruned. [I 2021-12-17 21:31:43,276] Trial 3926 pruned. [I 2021-12-17 21:31:49,674] Trial 3927 pruned. [I 2021-12-17 21:31:50,488] Trial 3928 pruned. [I 2021-12-17 21:31:51,792] Trial 3929 pruned. [I 2021-12-17 21:31:53,711] Trial 3930 pruned. [I 2021-12-17 21:31:55,540] Trial 3931 pruned. [I 2021-12-17 21:31:59,119] Trial 3932 pruned. [I 2021-12-17 21:31:59,793] Trial 3933 pruned. [I 2021-12-17 21:32:01,701] Trial 3934 pruned. [I 2021-12-17 21:32:04,861] Trial 3935 pruned. [I 2021-12-17 21:32:06,298] Trial 3936 pruned. [I 2021-12-17 21:32:09,337] Trial 3937 pruned. [I 2021-12-17 21:32:12,636] Trial 3938 pruned. [I 2021-12-17 21:32:19,099] Trial 3939 pruned. 
[I 2021-12-17 21:32:20,842] Trial 3940 pruned. [I 2021-12-17 21:32:22,811] Trial 3941 pruned. [I 2021-12-17 21:32:26,881] Trial 3942 pruned. [I 2021-12-17 21:32:30,231] Trial 3943 pruned. [I 2021-12-17 21:32:33,065] Trial 3944 pruned. [I 2021-12-17 21:32:35,177] Trial 3945 pruned. [I 2021-12-17 21:32:37,858] Trial 3946 pruned. [I 2021-12-17 21:32:43,016] Trial 3947 pruned. [I 2021-12-17 21:32:47,492] Trial 3948 pruned. [I 2021-12-17 21:32:49,459] Trial 3949 pruned. [I 2021-12-17 21:32:53,388] Trial 3950 pruned. [I 2021-12-17 21:32:54,093] Trial 3951 pruned. [I 2021-12-17 21:32:57,508] Trial 3952 pruned. [I 2021-12-17 21:33:00,604] Trial 3953 pruned. [I 2021-12-17 21:33:03,229] Trial 3954 pruned. [I 2021-12-17 21:33:05,024] Trial 3955 pruned. [I 2021-12-17 21:33:06,241] Trial 3956 pruned. [I 2021-12-17 21:33:06,897] Trial 3957 pruned. [I 2021-12-17 21:33:08,855] Trial 3958 pruned. [I 2021-12-17 21:33:12,170] Trial 3959 pruned. [I 2021-12-17 21:33:14,031] Trial 3960 pruned. [I 2021-12-17 21:33:17,015] Trial 3961 pruned. [I 2021-12-17 21:33:18,050] Trial 3962 pruned. [I 2021-12-17 21:33:21,438] Trial 3963 pruned. [I 2021-12-17 21:33:27,342] Trial 3964 pruned. [I 2021-12-17 21:33:30,822] Trial 3965 pruned. [I 2021-12-17 21:33:32,681] Trial 3966 pruned. [I 2021-12-17 21:33:35,027] Trial 3967 pruned. [I 2021-12-17 21:33:37,264] Trial 3968 pruned. [I 2021-12-17 21:33:39,011] Trial 3969 pruned. [I 2021-12-17 21:33:42,894] Trial 3970 pruned. [I 2021-12-17 21:33:44,692] Trial 3971 pruned. [I 2021-12-17 21:33:46,136] Trial 3972 pruned. [I 2021-12-17 21:33:47,888] Trial 3973 pruned. [I 2021-12-17 21:33:49,846] Trial 3974 pruned. [I 2021-12-17 21:33:51,151] Trial 3975 pruned. [I 2021-12-17 21:33:54,312] Trial 3976 pruned. [I 2021-12-17 21:33:57,628] Trial 3977 pruned. [I 2021-12-17 21:33:59,028] Trial 3978 pruned. [I 2021-12-17 21:34:00,919] Trial 3979 pruned. [I 2021-12-17 21:34:01,971] Trial 3980 pruned. [I 2021-12-17 21:34:03,735] Trial 3981 pruned. 
[I 2021-12-17 21:34:05,734] Trial 3982 pruned. [I 2021-12-17 21:34:10,976] Trial 3983 pruned. [I 2021-12-17 21:34:13,003] Trial 3984 pruned. [I 2021-12-17 21:34:14,821] Trial 3985 pruned. [I 2021-12-17 21:34:18,106] Trial 3986 pruned. [I 2021-12-17 21:34:22,906] Trial 3987 pruned. [I 2021-12-17 21:34:39,500] Trial 3988 pruned. [I 2021-12-17 21:34:41,222] Trial 3989 pruned. [I 2021-12-17 21:34:42,509] Trial 3990 pruned. [I 2021-12-17 21:34:51,432] Trial 3991 pruned. [I 2021-12-17 21:34:54,201] Trial 3992 pruned. [I 2021-12-17 21:34:55,956] Trial 3993 pruned. [I 2021-12-17 21:34:57,439] Trial 3994 pruned. [I 2021-12-17 21:34:59,271] Trial 3995 pruned. [I 2021-12-17 21:35:02,177] Trial 3996 pruned. [I 2021-12-17 21:35:08,671] Trial 3997 pruned. [I 2021-12-17 21:35:10,527] Trial 3998 pruned. [I 2021-12-17 21:35:11,409] Trial 3999 pruned. [I 2021-12-17 21:35:13,584] Trial 4000 pruned. [I 2021-12-17 21:35:15,227] Trial 4001 pruned. [I 2021-12-17 21:35:18,827] Trial 4002 pruned. [I 2021-12-17 21:35:20,602] Trial 4003 pruned. [I 2021-12-17 21:35:21,292] Trial 4004 pruned. [I 2021-12-17 21:35:23,187] Trial 4005 pruned. [I 2021-12-17 21:35:25,221] Trial 4006 pruned. [I 2021-12-17 21:35:28,284] Trial 4007 pruned. [I 2021-12-17 21:35:30,120] Trial 4008 pruned. [I 2021-12-17 21:35:31,967] Trial 4009 pruned. [I 2021-12-17 21:35:35,901] Trial 4010 pruned. [I 2021-12-17 21:35:38,070] Trial 4011 pruned. [I 2021-12-17 21:35:39,183] Trial 4012 pruned. [I 2021-12-17 21:35:41,617] Trial 4013 pruned. [I 2021-12-17 21:35:43,932] Trial 4014 pruned. [I 2021-12-17 21:35:47,457] Trial 4015 pruned. [I 2021-12-17 21:35:48,475] Trial 4016 pruned. [I 2021-12-17 21:35:50,367] Trial 4017 pruned. [I 2021-12-17 21:35:53,252] Trial 4018 pruned. [I 2021-12-17 21:35:59,217] Trial 4019 pruned. [I 2021-12-17 21:36:01,242] Trial 4020 pruned. [I 2021-12-17 21:36:03,575] Trial 4021 pruned. [I 2021-12-17 21:36:05,479] Trial 4022 pruned. [I 2021-12-17 21:36:06,358] Trial 4023 pruned. 
[I 2021-12-17 21:36:10,728] Trial 4024 pruned. [I 2021-12-17 21:36:12,706] Trial 4025 pruned. [I 2021-12-17 21:36:14,454] Trial 4026 pruned. [I 2021-12-17 21:36:15,080] Trial 4027 pruned. [I 2021-12-17 21:36:16,232] Trial 4028 pruned. [I 2021-12-17 21:36:19,793] Trial 4029 pruned. [I 2021-12-17 21:38:49,865] Trial 4030 finished with value: 173.1588134765625 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 346, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 3271 with value: 158.00619506835938. [I 2021-12-17 21:38:55,840] Trial 4031 pruned. [I 2021-12-17 21:38:58,096] Trial 4032 pruned. [I 2021-12-17 21:39:01,624] Trial 4033 pruned. [I 2021-12-17 21:39:04,474] Trial 4034 pruned. [I 2021-12-17 21:39:08,231] Trial 4035 pruned. [I 2021-12-17 21:39:12,787] Trial 4036 pruned. [I 2021-12-17 21:39:14,598] Trial 4037 pruned. [I 2021-12-17 21:39:16,111] Trial 4038 pruned. [I 2021-12-17 21:39:17,485] Trial 4039 pruned. [I 2021-12-17 21:39:22,730] Trial 4040 pruned. [I 2021-12-17 21:39:24,969] Trial 4041 pruned. [I 2021-12-17 21:39:26,913] Trial 4042 pruned. [I 2021-12-17 21:39:28,622] Trial 4043 pruned. [I 2021-12-17 21:39:29,738] Trial 4044 pruned. [I 2021-12-17 21:39:33,122] Trial 4045 pruned. [I 2021-12-17 21:39:39,104] Trial 4046 pruned. [I 2021-12-17 21:39:39,910] Trial 4047 pruned. [I 2021-12-17 21:39:41,792] Trial 4048 pruned. [I 2021-12-17 21:39:44,862] Trial 4049 pruned. [I 2021-12-17 21:39:47,152] Trial 4050 pruned. [I 2021-12-17 21:39:48,155] Trial 4051 pruned. [I 2021-12-17 21:39:51,389] Trial 4052 pruned. [I 2021-12-17 21:39:53,287] Trial 4053 pruned. [I 2021-12-17 21:39:56,647] Trial 4054 pruned. [I 2021-12-17 21:40:00,558] Trial 4055 pruned. [I 2021-12-17 21:40:02,405] Trial 4056 pruned. [I 2021-12-17 21:40:05,650] Trial 4057 pruned. 
[I 2021-12-17 21:40:07,334] Trial 4058 pruned. [I 2021-12-17 21:40:11,085] Trial 4059 pruned. [I 2021-12-17 21:40:15,458] Trial 4060 pruned. [I 2021-12-17 21:40:17,222] Trial 4061 pruned. [I 2021-12-17 21:40:20,427] Trial 4062 pruned. [I 2021-12-17 21:40:25,351] Trial 4063 pruned. [I 2021-12-17 21:40:26,546] Trial 4064 pruned. [I 2021-12-17 21:40:28,621] Trial 4065 pruned. [I 2021-12-17 21:40:30,929] Trial 4066 pruned. [I 2021-12-17 21:40:34,103] Trial 4067 pruned. [I 2021-12-17 21:40:36,230] Trial 4068 pruned. [I 2021-12-17 21:40:38,107] Trial 4069 pruned. [I 2021-12-17 21:40:45,021] Trial 4070 pruned. [I 2021-12-17 21:40:45,749] Trial 4071 pruned. [I 2021-12-17 21:40:49,395] Trial 4072 pruned. [I 2021-12-17 21:40:52,586] Trial 4073 pruned. [I 2021-12-17 21:40:54,784] Trial 4074 pruned. [I 2021-12-17 21:40:55,651] Trial 4075 pruned. [I 2021-12-17 21:40:59,224] Trial 4076 pruned. [I 2021-12-17 21:41:00,478] Trial 4077 pruned. [I 2021-12-17 21:41:03,533] Trial 4078 pruned. [I 2021-12-17 21:41:07,446] Trial 4079 pruned. [I 2021-12-17 21:41:09,391] Trial 4080 pruned. [I 2021-12-17 21:41:10,672] Trial 4081 pruned. [I 2021-12-17 21:41:15,734] Trial 4082 pruned. [I 2021-12-17 21:41:22,905] Trial 4083 pruned. [I 2021-12-17 21:41:25,845] Trial 4084 pruned. [I 2021-12-17 21:41:28,413] Trial 4085 pruned. [I 2021-12-17 21:41:30,161] Trial 4086 pruned. [I 2021-12-17 21:41:32,056] Trial 4087 pruned. [I 2021-12-17 21:41:32,790] Trial 4088 pruned. [I 2021-12-17 21:41:36,032] Trial 4089 pruned. [I 2021-12-17 21:41:38,090] Trial 4090 pruned. [I 2021-12-17 21:41:41,478] Trial 4091 pruned. [I 2021-12-17 21:41:45,109] Trial 4092 pruned. [I 2021-12-17 21:41:50,553] Trial 4093 pruned. [I 2021-12-17 21:41:53,563] Trial 4094 pruned. [I 2021-12-17 21:41:55,407] Trial 4095 pruned. [I 2021-12-17 21:41:56,401] Trial 4096 pruned. [I 2021-12-17 21:42:02,007] Trial 4097 pruned. [I 2021-12-17 21:42:03,595] Trial 4098 pruned. [I 2021-12-17 21:42:04,246] Trial 4099 pruned. 
[I 2021-12-17 21:42:06,079] Trial 4100 pruned. [I 2021-12-17 21:42:08,882] Trial 4101 pruned. [I 2021-12-17 21:42:10,649] Trial 4102 pruned. [I 2021-12-17 21:42:15,856] Trial 4103 pruned. [I 2021-12-17 21:42:18,003] Trial 4104 pruned. [I 2021-12-17 21:42:23,004] Trial 4105 pruned. [I 2021-12-17 21:42:38,695] Trial 4106 pruned. [I 2021-12-17 21:42:40,844] Trial 4107 pruned. [I 2021-12-17 21:42:44,326] Trial 4108 pruned. [I 2021-12-17 21:42:47,772] Trial 4109 pruned. [I 2021-12-17 21:42:51,537] Trial 4110 pruned. [I 2021-12-17 21:42:55,819] Trial 4111 pruned. [I 2021-12-17 21:42:56,991] Trial 4112 pruned. [I 2021-12-17 21:42:58,327] Trial 4113 pruned. [I 2021-12-17 21:43:01,963] Trial 4114 pruned. [I 2021-12-17 21:43:05,210] Trial 4115 pruned. [I 2021-12-17 21:43:08,155] Trial 4116 pruned. [I 2021-12-17 21:43:09,954] Trial 4117 pruned. [I 2021-12-17 21:43:14,151] Trial 4118 pruned. [I 2021-12-17 21:43:14,990] Trial 4119 pruned. [I 2021-12-17 21:43:17,042] Trial 4120 pruned. [I 2021-12-17 21:43:20,584] Trial 4121 pruned. [I 2021-12-17 21:43:25,105] Trial 4122 pruned. [I 2021-12-17 21:43:28,287] Trial 4123 pruned. [I 2021-12-17 21:43:29,104] Trial 4124 pruned. [I 2021-12-17 21:43:30,581] Trial 4125 pruned. [I 2021-12-17 21:43:32,580] Trial 4126 pruned. [I 2021-12-17 21:43:34,732] Trial 4127 pruned. [I 2021-12-17 21:43:38,107] Trial 4128 pruned. [I 2021-12-17 21:43:47,285] Trial 4129 pruned. [I 2021-12-17 21:43:49,215] Trial 4130 pruned. [I 2021-12-17 21:43:55,021] Trial 4131 pruned. [I 2021-12-17 21:43:56,168] Trial 4132 pruned. [I 2021-12-17 21:43:59,046] Trial 4133 pruned. [I 2021-12-17 21:44:02,437] Trial 4134 pruned. [I 2021-12-17 21:44:04,185] Trial 4135 pruned. [I 2021-12-17 21:44:05,527] Trial 4136 pruned. [I 2021-12-17 21:44:09,032] Trial 4137 pruned. [I 2021-12-17 21:44:12,780] Trial 4138 pruned. [I 2021-12-17 21:44:18,262] Trial 4139 pruned. [I 2021-12-17 21:44:22,274] Trial 4140 pruned. [I 2021-12-17 21:44:31,020] Trial 4141 pruned. 
[I 2021-12-17 21:44:34,855] Trial 4142 pruned. [I 2021-12-17 21:44:35,713] Trial 4143 pruned. [I 2021-12-17 21:44:38,645] Trial 4144 pruned. [I 2021-12-17 21:47:16,858] Trial 4145 finished with value: 172.8875274658203 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 376, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 3271 with value: 158.00619506835938. [I 2021-12-17 21:47:18,709] Trial 4146 pruned. [I 2021-12-17 21:47:21,122] Trial 4147 pruned. [I 2021-12-17 21:47:22,078] Trial 4148 pruned. [I 2021-12-17 21:47:25,550] Trial 4149 pruned. [I 2021-12-17 21:47:27,069] Trial 4150 pruned. [I 2021-12-17 21:47:30,231] Trial 4151 pruned. [I 2021-12-17 21:47:36,427] Trial 4152 pruned. [I 2021-12-17 21:47:37,991] Trial 4153 pruned. [I 2021-12-17 21:47:39,850] Trial 4154 pruned. [I 2021-12-17 21:47:42,649] Trial 4155 pruned. [I 2021-12-17 21:47:44,410] Trial 4156 pruned. [I 2021-12-17 21:50:52,430] Trial 4157 finished with value: 161.8147430419922 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 486, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 3271 with value: 158.00619506835938. [I 2021-12-17 21:50:56,600] Trial 4158 pruned. [I 2021-12-17 21:54:06,666] Trial 4159 finished with value: 165.89573669433594 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 504, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 3271 with value: 158.00619506835938. 
[I 2021-12-17 21:54:08,776] Trial 4160 pruned. [I 2021-12-17 21:54:11,306] Trial 4161 pruned. [I 2021-12-17 21:54:14,028] Trial 4162 pruned. [I 2021-12-17 21:54:16,284] Trial 4163 pruned. [I 2021-12-17 21:54:18,653] Trial 4164 pruned. [I 2021-12-17 21:54:22,754] Trial 4165 pruned. [I 2021-12-17 21:54:24,989] Trial 4166 pruned. [I 2021-12-17 21:54:29,262] Trial 4167 pruned. [I 2021-12-17 21:54:31,487] Trial 4168 pruned. [I 2021-12-17 21:54:33,139] Trial 4169 pruned. [I 2021-12-17 21:54:34,062] Trial 4170 pruned. [I 2021-12-17 21:54:36,381] Trial 4171 pruned. [I 2021-12-17 21:54:37,477] Trial 4172 pruned. [I 2021-12-17 21:54:39,732] Trial 4173 pruned. [I 2021-12-17 21:54:42,070] Trial 4174 pruned. [I 2021-12-17 21:54:43,333] Trial 4175 pruned. [I 2021-12-17 21:54:47,476] Trial 4176 pruned. [I 2021-12-17 21:54:51,722] Trial 4177 pruned. [I 2021-12-17 21:54:55,805] Trial 4178 pruned. [I 2021-12-17 21:55:03,555] Trial 4179 pruned. [I 2021-12-17 21:55:05,855] Trial 4180 pruned. [I 2021-12-17 21:55:11,904] Trial 4181 pruned. [I 2021-12-17 21:55:18,879] Trial 4182 pruned. [I 2021-12-17 21:55:21,071] Trial 4183 pruned. [I 2021-12-17 21:55:23,800] Trial 4184 pruned. [I 2021-12-17 21:55:25,178] Trial 4185 pruned. [I 2021-12-17 21:55:29,330] Trial 4186 pruned. [I 2021-12-17 21:58:32,435] Trial 4187 finished with value: 171.46148681640625 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 470, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 3271 with value: 158.00619506835938. [I 2021-12-17 21:58:40,823] Trial 4188 pruned. [I 2021-12-17 21:58:44,514] Trial 4189 pruned. [I 2021-12-17 21:58:48,515] Trial 4190 pruned. [I 2021-12-17 21:58:50,129] Trial 4191 pruned. [I 2021-12-17 21:58:54,361] Trial 4192 pruned. [I 2021-12-17 21:58:58,412] Trial 4193 pruned. 
[I 2021-12-17 21:59:02,369] Trial 4194 pruned. [I 2021-12-17 21:59:03,111] Trial 4195 pruned. [I 2021-12-17 22:02:08,728] Trial 4196 finished with value: 185.7809295654297 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 476, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 3271 with value: 158.00619506835938. [I 2021-12-17 22:02:09,384] Trial 4197 pruned. [I 2021-12-17 22:02:11,651] Trial 4198 pruned. [I 2021-12-17 22:02:15,851] Trial 4199 pruned. [I 2021-12-17 22:02:18,007] Trial 4200 pruned. [I 2021-12-17 22:02:20,341] Trial 4201 pruned. [I 2021-12-17 22:02:26,321] Trial 4202 pruned. [I 2021-12-17 22:02:28,510] Trial 4203 pruned. [I 2021-12-17 22:02:32,866] Trial 4204 pruned. [I 2021-12-17 22:02:35,300] Trial 4205 pruned. [I 2021-12-17 22:02:38,084] Trial 4206 pruned. [I 2021-12-17 22:02:40,203] Trial 4207 pruned. [I 2021-12-17 22:02:42,335] Trial 4208 pruned. [I 2021-12-17 22:02:43,676] Trial 4209 pruned. [I 2021-12-17 22:02:47,666] Trial 4210 pruned. [I 2021-12-17 22:02:49,919] Trial 4211 pruned. [I 2021-12-17 22:02:52,140] Trial 4212 pruned. [I 2021-12-17 22:02:53,887] Trial 4213 pruned. [I 2021-12-17 22:02:57,849] Trial 4214 pruned. [I 2021-12-17 22:03:02,240] Trial 4215 pruned. [I 2021-12-17 22:03:04,343] Trial 4216 pruned. [I 2021-12-17 22:03:05,303] Trial 4217 pruned. [I 2021-12-17 22:03:09,206] Trial 4218 pruned. [I 2021-12-17 22:03:11,442] Trial 4219 pruned. [I 2021-12-17 22:03:12,149] Trial 4220 pruned. [I 2021-12-17 22:03:16,593] Trial 4221 pruned. [I 2021-12-17 22:03:20,888] Trial 4222 pruned. [I 2021-12-17 22:03:23,003] Trial 4223 pruned. [I 2021-12-17 22:03:25,276] Trial 4224 pruned. [I 2021-12-17 22:03:27,398] Trial 4225 pruned. [I 2021-12-17 22:03:31,353] Trial 4226 pruned. [I 2021-12-17 22:03:35,263] Trial 4227 pruned. 
[I 2021-12-17 22:03:37,502] Trial 4228 pruned. [I 2021-12-17 22:03:39,666] Trial 4229 pruned. [I 2021-12-17 22:03:42,248] Trial 4230 pruned. [I 2021-12-17 22:03:44,349] Trial 4231 pruned. [I 2021-12-17 22:03:48,486] Trial 4232 pruned. [I 2021-12-17 22:03:49,842] Trial 4233 pruned. [I 2021-12-17 22:03:51,954] Trial 4234 pruned. [I 2021-12-17 22:03:53,603] Trial 4235 pruned. [I 2021-12-17 22:03:55,961] Trial 4236 pruned. [I 2021-12-17 22:04:00,219] Trial 4237 pruned. [I 2021-12-17 22:04:04,321] Trial 4238 pruned. [I 2021-12-17 22:04:06,433] Trial 4239 pruned. [I 2021-12-17 22:04:07,379] Trial 4240 pruned. [I 2021-12-17 22:04:11,512] Trial 4241 pruned. [I 2021-12-17 22:04:13,391] Trial 4242 pruned. [I 2021-12-17 22:04:17,209] Trial 4243 pruned. [I 2021-12-17 22:04:17,948] Trial 4244 pruned. [I 2021-12-17 22:04:22,083] Trial 4245 pruned. [I 2021-12-17 22:04:24,177] Trial 4246 pruned. [I 2021-12-17 22:04:27,967] Trial 4247 pruned. [I 2021-12-17 22:04:33,817] Trial 4248 pruned. [I 2021-12-17 22:04:37,615] Trial 4249 pruned. [I 2021-12-17 22:04:38,701] Trial 4250 pruned. [I 2021-12-17 22:04:41,180] Trial 4251 pruned. [I 2021-12-17 22:04:44,223] Trial 4252 pruned. [I 2021-12-17 22:04:48,205] Trial 4253 pruned. [I 2021-12-17 22:04:56,019] Trial 4254 pruned. [I 2021-12-17 22:04:58,141] Trial 4255 pruned. [I 2021-12-17 22:05:01,780] Trial 4256 pruned. [I 2021-12-17 22:05:10,390] Trial 4257 pruned. [I 2021-12-17 22:05:14,212] Trial 4258 pruned. [I 2021-12-17 22:05:16,350] Trial 4259 pruned. [I 2021-12-17 22:05:18,625] Trial 4260 pruned. [I 2021-12-17 22:05:22,416] Trial 4261 pruned. [I 2021-12-17 22:05:24,481] Trial 4262 pruned. [I 2021-12-17 22:05:27,661] Trial 4263 pruned. [I 2021-12-17 22:05:29,092] Trial 4264 pruned. [I 2021-12-17 22:05:30,505] Trial 4265 pruned. [I 2021-12-17 22:05:32,703] Trial 4266 pruned. [I 2021-12-17 22:05:38,109] Trial 4267 pruned. [I 2021-12-17 22:05:38,736] Trial 4268 pruned. [I 2021-12-17 22:05:42,752] Trial 4269 pruned. 
[I 2021-12-17 22:05:45,979] Trial 4270 pruned. [I 2021-12-17 22:05:48,753] Trial 4271 pruned. [I 2021-12-17 22:05:51,655] Trial 4272 pruned. [I 2021-12-17 22:05:53,783] Trial 4273 pruned. [I 2021-12-17 22:05:59,642] Trial 4274 pruned. [I 2021-12-17 22:06:01,618] Trial 4275 pruned. [I 2021-12-17 22:06:03,951] Trial 4276 pruned. [I 2021-12-17 22:06:08,049] Trial 4277 pruned. [I 2021-12-17 22:06:10,872] Trial 4278 pruned. [I 2021-12-17 22:06:14,437] Trial 4279 pruned. [I 2021-12-17 22:06:15,560] Trial 4280 pruned. [I 2021-12-17 22:06:17,799] Trial 4281 pruned. [I 2021-12-17 22:06:19,887] Trial 4282 pruned. [I 2021-12-17 22:06:23,955] Trial 4283 pruned. [I 2021-12-17 22:06:27,683] Trial 4284 pruned. [I 2021-12-17 22:06:28,788] Trial 4285 pruned. [I 2021-12-17 22:06:34,955] Trial 4286 pruned. [I 2021-12-17 22:06:38,743] Trial 4287 pruned. [I 2021-12-17 22:06:39,835] Trial 4288 pruned. [I 2021-12-17 22:06:43,145] Trial 4289 pruned. [I 2021-12-17 22:06:47,144] Trial 4290 pruned. [I 2021-12-17 22:06:53,463] Trial 4291 pruned. [I 2021-12-17 22:06:54,173] Trial 4292 pruned. [I 2021-12-17 22:06:57,938] Trial 4293 pruned. [I 2021-12-17 22:07:01,605] Trial 4294 pruned. [I 2021-12-17 22:07:03,839] Trial 4295 pruned. [I 2021-12-17 22:07:07,572] Trial 4296 pruned. [I 2021-12-17 22:07:09,785] Trial 4297 pruned. [I 2021-12-17 22:07:12,087] Trial 4298 pruned. [I 2021-12-17 22:07:19,024] Trial 4299 pruned. [I 2021-12-17 22:07:20,508] Trial 4300 pruned. [I 2021-12-17 22:07:25,876] Trial 4301 pruned. [I 2021-12-17 22:07:29,770] Trial 4302 pruned. [I 2021-12-17 22:07:31,797] Trial 4303 pruned. [I 2021-12-17 22:07:34,048] Trial 4304 pruned. [I 2021-12-17 22:07:35,479] Trial 4305 pruned. [I 2021-12-17 22:07:37,603] Trial 4306 pruned. [I 2021-12-17 22:07:39,507] Trial 4307 pruned. [I 2021-12-17 22:07:43,271] Trial 4308 pruned. [I 2021-12-17 22:07:45,730] Trial 4309 pruned. [I 2021-12-17 22:07:49,370] Trial 4310 pruned. [I 2021-12-17 22:07:52,227] Trial 4311 pruned. 
[I 2021-12-17 22:07:53,794] Trial 4312 pruned. [I 2021-12-17 22:07:57,639] Trial 4313 pruned. [I 2021-12-17 22:07:58,586] Trial 4314 pruned. [I 2021-12-17 22:08:01,034] Trial 4315 pruned. [I 2021-12-17 22:08:04,692] Trial 4316 pruned. [I 2021-12-17 22:08:05,649] Trial 4317 pruned. [I 2021-12-17 22:08:07,207] Trial 4318 pruned. [I 2021-12-17 22:08:11,043] Trial 4319 pruned. [I 2021-12-17 22:08:14,643] Trial 4320 pruned. [I 2021-12-17 22:08:17,645] Trial 4321 pruned. [I 2021-12-17 22:08:19,717] Trial 4322 pruned. [I 2021-12-17 22:08:24,748] Trial 4323 pruned. [I 2021-12-17 22:08:27,420] Trial 4324 pruned. [I 2021-12-17 22:08:29,666] Trial 4325 pruned. [I 2021-12-17 22:08:31,871] Trial 4326 pruned. [I 2021-12-17 22:08:33,847] Trial 4327 pruned. [I 2021-12-17 22:08:35,093] Trial 4328 pruned. [I 2021-12-17 22:08:40,941] Trial 4329 pruned. [I 2021-12-17 22:08:42,206] Trial 4330 pruned. [I 2021-12-17 22:08:43,459] Trial 4331 pruned. [I 2021-12-17 22:08:45,429] Trial 4332 pruned. [I 2021-12-17 22:08:49,327] Trial 4333 pruned. [I 2021-12-17 22:08:53,615] Trial 4334 pruned. [I 2021-12-17 22:08:59,803] Trial 4335 pruned. [I 2021-12-17 22:09:00,958] Trial 4336 pruned. [I 2021-12-17 22:09:02,858] Trial 4337 pruned. [I 2021-12-17 22:09:06,810] Trial 4338 pruned. [I 2021-12-17 22:09:12,185] Trial 4339 pruned. [I 2021-12-17 22:09:22,102] Trial 4340 pruned. [I 2021-12-17 22:09:26,162] Trial 4341 pruned. [I 2021-12-17 22:09:26,886] Trial 4342 pruned. [I 2021-12-17 22:09:31,779] Trial 4343 pruned. [I 2021-12-17 22:09:33,430] Trial 4344 pruned. [I 2021-12-17 22:09:35,352] Trial 4345 pruned. [I 2021-12-17 22:09:39,519] Trial 4346 pruned. [I 2021-12-17 22:09:44,701] Trial 4347 pruned. [I 2021-12-17 22:09:46,821] Trial 4348 pruned. [I 2021-12-17 22:09:50,298] Trial 4349 pruned. [I 2021-12-17 22:09:52,096] Trial 4350 pruned. [I 2021-12-17 22:09:53,850] Trial 4351 pruned. [I 2021-12-17 22:09:57,511] Trial 4352 pruned. [I 2021-12-17 22:09:58,934] Trial 4353 pruned. 
[I 2021-12-17 22:10:02,107] Trial 4354 pruned. [I 2021-12-17 22:10:04,884] Trial 4355 pruned. [I 2021-12-17 22:10:08,835] Trial 4356 pruned. [I 2021-12-17 22:10:12,954] Trial 4357 pruned. [I 2021-12-17 22:10:16,627] Trial 4358 pruned. [I 2021-12-17 22:10:19,381] Trial 4359 pruned. [I 2021-12-17 22:10:22,978] Trial 4360 pruned. [I 2021-12-17 22:10:24,452] Trial 4361 pruned. [I 2021-12-17 22:10:28,011] Trial 4362 pruned. [I 2021-12-17 22:10:31,811] Trial 4363 pruned. [I 2021-12-17 22:10:33,403] Trial 4364 pruned. [I 2021-12-17 22:10:34,015] Trial 4365 pruned. [I 2021-12-17 22:10:37,874] Trial 4366 pruned. [I 2021-12-17 22:10:39,888] Trial 4367 pruned. [I 2021-12-17 22:10:43,290] Trial 4368 pruned. [I 2021-12-17 22:10:45,330] Trial 4369 pruned. [I 2021-12-17 22:10:48,333] Trial 4370 pruned. [I 2021-12-17 22:10:49,661] Trial 4371 pruned. [I 2021-12-17 22:10:50,854] Trial 4372 pruned. [I 2021-12-17 22:10:58,138] Trial 4373 pruned. [I 2021-12-17 22:10:59,468] Trial 4374 pruned. [I 2021-12-17 22:11:01,787] Trial 4375 pruned. [I 2021-12-17 22:11:03,907] Trial 4376 pruned. [I 2021-12-17 22:11:05,474] Trial 4377 pruned. [I 2021-12-17 22:11:07,223] Trial 4378 pruned. [I 2021-12-17 22:11:08,661] Trial 4379 pruned. [I 2021-12-17 22:11:11,141] Trial 4380 pruned. [I 2021-12-17 22:11:13,091] Trial 4381 pruned. [I 2021-12-17 22:11:16,517] Trial 4382 pruned. [I 2021-12-17 22:11:18,484] Trial 4383 pruned. [I 2021-12-17 22:11:19,803] Trial 4384 pruned. [I 2021-12-17 22:11:21,786] Trial 4385 pruned. [I 2021-12-17 22:11:23,947] Trial 4386 pruned. [I 2021-12-17 22:11:25,833] Trial 4387 pruned. [I 2021-12-17 22:11:27,309] Trial 4388 pruned. [I 2021-12-17 22:11:28,106] Trial 4389 pruned. [I 2021-12-17 22:11:31,791] Trial 4390 pruned. [I 2021-12-17 22:11:35,771] Trial 4391 pruned. [I 2021-12-17 22:11:37,631] Trial 4392 pruned. [I 2021-12-17 22:11:40,728] Trial 4393 pruned. [I 2021-12-17 22:11:44,417] Trial 4394 pruned. [I 2021-12-17 22:11:45,947] Trial 4395 pruned. 
[I 2021-12-17 22:11:51,698] Trial 4396 pruned. [I 2021-12-17 22:11:55,661] Trial 4397 pruned. [I 2021-12-17 22:11:57,540] Trial 4398 pruned. [I 2021-12-17 22:12:00,205] Trial 4399 pruned. [I 2021-12-17 22:12:05,987] Trial 4400 pruned. [I 2021-12-17 22:12:07,314] Trial 4401 pruned. [I 2021-12-17 22:12:09,632] Trial 4402 pruned. [I 2021-12-17 22:12:11,697] Trial 4403 pruned. [I 2021-12-17 22:12:14,883] Trial 4404 pruned. [I 2021-12-17 22:12:18,396] Trial 4405 pruned. [I 2021-12-17 22:12:22,031] Trial 4406 pruned. [I 2021-12-17 22:12:26,312] Trial 4407 pruned. [I 2021-12-17 22:12:27,148] Trial 4408 pruned. [I 2021-12-17 22:12:29,377] Trial 4409 pruned. [I 2021-12-17 22:12:31,260] Trial 4410 pruned. [I 2021-12-17 22:12:35,297] Trial 4411 pruned. [I 2021-12-17 22:12:37,143] Trial 4412 pruned. [I 2021-12-17 22:12:37,888] Trial 4413 pruned. [I 2021-12-17 22:12:39,854] Trial 4414 pruned. [I 2021-12-17 22:12:43,563] Trial 4415 pruned. [I 2021-12-17 22:12:45,184] Trial 4416 pruned. [I 2021-12-17 22:12:47,209] Trial 4417 pruned. [I 2021-12-17 22:12:49,195] Trial 4418 pruned. [I 2021-12-17 22:12:52,622] Trial 4419 pruned. [I 2021-12-17 22:12:56,068] Trial 4420 pruned. [I 2021-12-17 22:12:58,263] Trial 4421 pruned. [I 2021-12-17 22:13:02,865] Trial 4422 pruned. [I 2021-12-17 22:13:06,597] Trial 4423 pruned. [I 2021-12-17 22:13:10,671] Trial 4424 pruned. [I 2021-12-17 22:13:11,586] Trial 4425 pruned. [I 2021-12-17 22:13:19,679] Trial 4426 pruned. [I 2021-12-17 22:13:21,494] Trial 4427 pruned. [I 2021-12-17 22:13:24,647] Trial 4428 pruned. [I 2021-12-17 22:13:26,749] Trial 4429 pruned. [I 2021-12-17 22:13:29,382] Trial 4430 pruned. [I 2021-12-17 22:13:31,492] Trial 4431 pruned. [I 2021-12-17 22:13:33,145] Trial 4432 pruned. [I 2021-12-17 22:13:36,529] Trial 4433 pruned. [I 2021-12-17 22:13:37,342] Trial 4434 pruned. [I 2021-12-17 22:13:39,290] Trial 4435 pruned. [I 2021-12-17 22:13:40,511] Trial 4436 pruned. [I 2021-12-17 22:13:43,814] Trial 4437 pruned. 
[I 2021-12-17 22:13:44,925] Trial 4438 pruned. [I 2021-12-17 22:13:46,820] Trial 4439 pruned. [I 2021-12-17 22:13:50,757] Trial 4440 pruned. [I 2021-12-17 22:13:52,991] Trial 4441 pruned. [I 2021-12-17 22:13:56,414] Trial 4442 pruned. [I 2021-12-17 22:14:00,023] Trial 4443 pruned. [I 2021-12-17 22:14:01,949] Trial 4444 pruned. [I 2021-12-17 22:14:03,907] Trial 4445 pruned. [I 2021-12-17 22:14:06,069] Trial 4446 pruned. [I 2021-12-17 22:14:08,284] Trial 4447 pruned. [I 2021-12-17 22:14:10,293] Trial 4448 pruned. [I 2021-12-17 22:14:11,537] Trial 4449 pruned. [I 2021-12-17 22:14:14,594] Trial 4450 pruned. [I 2021-12-17 22:14:16,435] Trial 4451 pruned. [I 2021-12-17 22:14:20,868] Trial 4452 pruned. [I 2021-12-17 22:14:22,505] Trial 4453 pruned. [I 2021-12-17 22:14:25,786] Trial 4454 pruned. [I 2021-12-17 22:14:29,454] Trial 4455 pruned. [I 2021-12-17 22:14:30,340] Trial 4456 pruned. [I 2021-12-17 22:14:31,425] Trial 4457 pruned. [I 2021-12-17 22:14:38,671] Trial 4458 pruned. [I 2021-12-17 22:14:41,691] Trial 4459 pruned. [I 2021-12-17 22:14:43,422] Trial 4460 pruned. [I 2021-12-17 22:14:45,168] Trial 4461 pruned. [I 2021-12-17 22:14:45,880] Trial 4462 pruned. [I 2021-12-17 22:14:48,840] Trial 4463 pruned. [I 2021-12-17 22:14:51,106] Trial 4464 pruned. [I 2021-12-17 22:14:54,222] Trial 4465 pruned. [I 2021-12-17 22:14:55,977] Trial 4466 pruned. [I 2021-12-17 22:14:58,242] Trial 4467 pruned. [I 2021-12-17 22:15:01,025] Trial 4468 pruned. [I 2021-12-17 22:15:02,645] Trial 4469 pruned. [I 2021-12-17 22:15:08,811] Trial 4470 pruned. [I 2021-12-17 22:15:15,738] Trial 4471 pruned. [I 2021-12-17 22:15:17,548] Trial 4472 pruned. [I 2021-12-17 22:15:18,813] Trial 4473 pruned. [I 2021-12-17 22:15:20,871] Trial 4474 pruned. [I 2021-12-17 22:15:22,769] Trial 4475 pruned. [I 2021-12-17 22:15:25,301] Trial 4476 pruned. [I 2021-12-17 22:15:27,232] Trial 4477 pruned. [I 2021-12-17 22:15:29,317] Trial 4478 pruned. [I 2021-12-17 22:15:31,524] Trial 4479 pruned. 
[I 2021-12-17 22:15:32,130] Trial 4480 pruned. [I 2021-12-17 22:15:33,659] Trial 4481 pruned. [I 2021-12-17 22:15:36,478] Trial 4482 pruned. [I 2021-12-17 22:15:38,420] Trial 4483 pruned. [I 2021-12-17 22:15:40,261] Trial 4484 pruned. [I 2021-12-17 22:15:40,991] Trial 4485 pruned. [I 2021-12-17 22:15:44,395] Trial 4486 pruned. [I 2021-12-17 22:15:46,238] Trial 4487 pruned. [I 2021-12-17 22:15:48,148] Trial 4488 pruned. [I 2021-12-17 22:15:53,133] Trial 4489 pruned. [I 2021-12-17 22:15:55,109] Trial 4490 pruned. [I 2021-12-17 22:16:01,026] Trial 4491 pruned. [I 2021-12-17 22:16:03,006] Trial 4492 pruned. [I 2021-12-17 22:16:07,705] Trial 4493 pruned. [I 2021-12-17 22:16:11,405] Trial 4494 pruned. [I 2021-12-17 22:16:14,267] Trial 4495 pruned. [I 2021-12-17 22:16:17,076] Trial 4496 pruned. [I 2021-12-17 22:16:19,160] Trial 4497 pruned. [I 2021-12-17 22:16:30,647] Trial 4498 pruned. [I 2021-12-17 22:16:34,283] Trial 4499 pruned. [I 2021-12-17 22:16:35,554] Trial 4500 pruned. [I 2021-12-17 22:16:39,688] Trial 4501 pruned. [I 2021-12-17 22:16:41,252] Trial 4502 pruned. [I 2021-12-17 22:16:45,654] Trial 4503 pruned. [I 2021-12-17 22:16:46,258] Trial 4504 pruned. [I 2021-12-17 22:16:49,981] Trial 4505 pruned. [I 2021-12-17 22:16:51,756] Trial 4506 pruned. [I 2021-12-17 22:16:55,317] Trial 4507 pruned. [I 2021-12-17 22:16:57,223] Trial 4508 pruned. [I 2021-12-17 22:16:58,086] Trial 4509 pruned. [I 2021-12-17 22:17:02,081] Trial 4510 pruned. [I 2021-12-17 22:17:05,837] Trial 4511 pruned. [I 2021-12-17 22:17:09,219] Trial 4512 pruned. [I 2021-12-17 22:17:11,056] Trial 4513 pruned. [I 2021-12-17 22:17:13,012] Trial 4514 pruned. [I 2021-12-17 22:17:19,460] Trial 4515 pruned. [I 2021-12-17 22:17:21,358] Trial 4516 pruned. [I 2021-12-17 22:17:25,132] Trial 4517 pruned. [I 2021-12-17 22:17:27,048] Trial 4518 pruned. [I 2021-12-17 22:17:29,398] Trial 4519 pruned. [I 2021-12-17 22:17:32,759] Trial 4520 pruned. [I 2021-12-17 22:17:33,896] Trial 4521 pruned. 
[I 2021-12-17 22:17:37,459] Trial 4522 pruned. [I 2021-12-17 22:17:41,489] Trial 4523 pruned. [I 2021-12-17 22:17:43,506] Trial 4524 pruned. [I 2021-12-17 22:17:47,310] Trial 4525 pruned. [I 2021-12-17 22:20:15,979] Trial 4526 finished with value: 196.5987548828125 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 338, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 3271 with value: 158.00619506835938. [I 2021-12-17 22:20:17,123] Trial 4527 pruned. [I 2021-12-17 22:20:18,085] Trial 4528 pruned. [I 2021-12-17 22:20:20,249] Trial 4529 pruned. [I 2021-12-17 22:20:24,497] Trial 4530 pruned. [I 2021-12-17 22:20:26,409] Trial 4531 pruned. [I 2021-12-17 22:20:28,258] Trial 4532 pruned. [I 2021-12-17 22:20:29,860] Trial 4533 pruned. [I 2021-12-17 22:20:30,518] Trial 4534 pruned. [I 2021-12-17 22:20:33,361] Trial 4535 pruned. [I 2021-12-17 22:20:35,431] Trial 4536 pruned. [I 2021-12-17 22:20:43,191] Trial 4537 pruned. [I 2021-12-17 22:20:46,520] Trial 4538 pruned. [I 2021-12-17 22:20:49,386] Trial 4539 pruned. [I 2021-12-17 22:20:52,207] Trial 4540 pruned. [I 2021-12-17 22:20:57,194] Trial 4541 pruned. [I 2021-12-17 22:21:00,995] Trial 4542 pruned. [I 2021-12-17 22:21:02,729] Trial 4543 pruned. [I 2021-12-17 22:21:10,152] Trial 4544 pruned. [I 2021-12-17 22:21:11,298] Trial 4545 pruned. [I 2021-12-17 22:21:12,514] Trial 4546 pruned. [I 2021-12-17 22:21:16,033] Trial 4547 pruned. [I 2021-12-17 22:21:20,776] Trial 4548 pruned. [I 2021-12-17 22:21:24,316] Trial 4549 pruned. [I 2021-12-17 22:21:26,583] Trial 4550 pruned. [I 2021-12-17 22:21:32,386] Trial 4551 pruned. [I 2021-12-17 22:21:36,488] Trial 4552 pruned. [I 2021-12-17 22:21:38,615] Trial 4553 pruned. [I 2021-12-17 22:21:40,700] Trial 4554 pruned. [I 2021-12-17 22:21:41,427] Trial 4555 pruned. 
[I 2021-12-17 22:21:43,448] Trial 4556 pruned. [I 2021-12-17 22:21:44,169] Trial 4557 pruned. [I 2021-12-17 22:21:47,848] Trial 4558 pruned. [I 2021-12-17 22:21:51,303] Trial 4559 pruned. [I 2021-12-17 22:21:53,773] Trial 4560 pruned. [I 2021-12-17 22:21:55,390] Trial 4561 pruned. [I 2021-12-17 22:21:57,324] Trial 4562 pruned. [I 2021-12-17 22:21:59,102] Trial 4563 pruned. [I 2021-12-17 22:22:01,126] Trial 4564 pruned. [I 2021-12-17 22:22:04,971] Trial 4565 pruned. [I 2021-12-17 22:22:07,865] Trial 4566 pruned. [I 2021-12-17 22:22:11,121] Trial 4567 pruned. [I 2021-12-17 22:22:12,430] Trial 4568 pruned. [I 2021-12-17 22:22:14,204] Trial 4569 pruned. [I 2021-12-17 22:22:16,040] Trial 4570 pruned. [I 2021-12-17 22:22:18,203] Trial 4571 pruned. [I 2021-12-17 22:22:21,734] Trial 4572 pruned. [I 2021-12-17 22:22:23,994] Trial 4573 pruned. [I 2021-12-17 22:22:27,174] Trial 4574 pruned. [I 2021-12-17 22:22:28,821] Trial 4575 pruned. [I 2021-12-17 22:22:32,701] Trial 4576 pruned. [I 2021-12-17 22:22:33,619] Trial 4577 pruned. [I 2021-12-17 22:22:38,441] Trial 4578 pruned. [I 2021-12-17 22:22:40,455] Trial 4579 pruned. [I 2021-12-17 22:22:43,925] Trial 4580 pruned. [I 2021-12-17 22:22:44,833] Trial 4581 pruned. [I 2021-12-17 22:23:18,248] Trial 4582 pruned. [I 2021-12-17 22:23:20,815] Trial 4583 pruned. [I 2021-12-17 22:23:23,101] Trial 4584 pruned. [I 2021-12-17 22:23:25,269] Trial 4585 pruned. [I 2021-12-17 22:23:29,144] Trial 4586 pruned. [I 2021-12-17 22:23:34,320] Trial 4587 pruned. [I 2021-12-17 22:23:36,354] Trial 4588 pruned. [I 2021-12-17 22:23:37,608] Trial 4589 pruned. [I 2021-12-17 22:23:40,830] Trial 4590 pruned. [I 2021-12-17 22:23:42,343] Trial 4591 pruned. [I 2021-12-17 22:23:44,413] Trial 4592 pruned. [I 2021-12-17 22:23:46,197] Trial 4593 pruned. [I 2021-12-17 22:23:47,460] Trial 4594 pruned. [I 2021-12-17 22:23:54,496] Trial 4595 pruned. [I 2021-12-17 22:23:57,594] Trial 4596 pruned. 
[I 2021-12-17 22:27:46,381] Trial 4597 finished with value: 163.5379638671875 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 630, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 3271 with value: 158.00619506835938. [I 2021-12-17 22:27:48,843] Trial 4598 pruned. [I 2021-12-17 22:27:51,518] Trial 4599 pruned. [I 2021-12-17 22:27:54,357] Trial 4600 pruned. [I 2021-12-17 22:27:59,027] Trial 4601 pruned. [I 2021-12-17 22:28:03,991] Trial 4602 pruned. [I 2021-12-17 22:28:06,606] Trial 4603 pruned. [I 2021-12-17 22:28:11,883] Trial 4604 pruned. [I 2021-12-17 22:28:12,937] Trial 4605 pruned. [I 2021-12-17 22:28:13,734] Trial 4606 pruned. [I 2021-12-17 22:28:15,698] Trial 4607 pruned. [I 2021-12-17 22:28:18,379] Trial 4608 pruned. [I 2021-12-17 22:28:21,115] Trial 4609 pruned. [I 2021-12-17 22:28:23,957] Trial 4610 pruned. [I 2021-12-17 22:28:29,423] Trial 4611 pruned. [I 2021-12-17 22:28:30,990] Trial 4612 pruned. [I 2021-12-17 22:28:35,931] Trial 4613 pruned. [I 2021-12-17 22:28:38,845] Trial 4614 pruned. [I 2021-12-17 22:28:41,361] Trial 4615 pruned. [I 2021-12-17 22:28:44,080] Trial 4616 pruned. [I 2021-12-17 22:28:46,714] Trial 4617 pruned. [I 2021-12-17 22:28:52,455] Trial 4618 pruned. [I 2021-12-17 22:28:53,956] Trial 4619 pruned. [I 2021-12-17 22:28:59,298] Trial 4620 pruned. [I 2021-12-17 22:29:02,313] Trial 4621 pruned. [I 2021-12-17 22:29:05,215] Trial 4622 pruned. [I 2021-12-17 22:29:07,957] Trial 4623 pruned. [I 2021-12-17 22:29:10,628] Trial 4624 pruned. [I 2021-12-17 22:29:13,214] Trial 4625 pruned. [I 2021-12-17 22:29:14,268] Trial 4626 pruned. [I 2021-12-17 22:29:16,849] Trial 4627 pruned. [I 2021-12-17 22:29:19,191] Trial 4628 pruned. [I 2021-12-17 22:29:23,980] Trial 4629 pruned. [I 2021-12-17 22:29:26,337] Trial 4630 pruned. 
[I 2021-12-17 22:29:27,147] Trial 4631 pruned. [I 2021-12-17 22:29:32,311] Trial 4632 pruned. [I 2021-12-17 22:29:36,920] Trial 4633 pruned. [I 2021-12-17 22:29:38,333] Trial 4634 pruned. [I 2021-12-17 22:29:40,923] Trial 4635 pruned. [I 2021-12-17 22:29:43,693] Trial 4636 pruned. [I 2021-12-17 22:29:46,447] Trial 4637 pruned. [I 2021-12-17 22:29:49,082] Trial 4638 pruned. [I 2021-12-17 22:34:17,375] Trial 4639 finished with value: 177.08853149414062 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 746, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 3271 with value: 158.00619506835938. [I 2021-12-17 22:34:22,914] Trial 4640 pruned. [I 2021-12-17 22:34:25,299] Trial 4641 pruned. [I 2021-12-17 22:34:31,107] Trial 4642 pruned. [I 2021-12-17 22:34:33,601] Trial 4643 pruned. [I 2021-12-17 22:34:36,322] Trial 4644 pruned. [I 2021-12-17 22:34:41,790] Trial 4645 pruned. [I 2021-12-17 22:34:43,265] Trial 4646 pruned. [I 2021-12-17 22:34:48,235] Trial 4647 pruned. [I 2021-12-17 22:34:50,912] Trial 4648 pruned. [I 2021-12-17 22:34:53,047] Trial 4649 pruned. [I 2021-12-17 22:34:54,166] Trial 4650 pruned. [I 2021-12-17 22:34:57,127] Trial 4651 pruned. [I 2021-12-17 22:34:59,947] Trial 4652 pruned. [I 2021-12-17 22:35:05,389] Trial 4653 pruned. [I 2021-12-17 22:35:06,301] Trial 4654 pruned. [I 2021-12-17 22:35:08,856] Trial 4655 pruned. [I 2021-12-17 22:35:11,265] Trial 4656 pruned. [I 2021-12-17 22:35:13,954] Trial 4657 pruned. [I 2021-12-17 22:35:16,566] Trial 4658 pruned. [I 2021-12-17 22:35:19,408] Trial 4659 pruned. [I 2021-12-17 22:35:22,362] Trial 4660 pruned. [I 2021-12-17 22:35:27,181] Trial 4661 pruned. [I 2021-12-17 22:35:29,763] Trial 4662 pruned. [I 2021-12-17 22:35:32,184] Trial 4663 pruned. [I 2021-12-17 22:35:35,060] Trial 4664 pruned. 
[I 2021-12-17 22:35:40,008] Trial 4665 pruned. [I 2021-12-17 22:35:41,459] Trial 4666 pruned. [I 2021-12-17 22:35:44,491] Trial 4667 pruned. [I 2021-12-17 22:35:47,531] Trial 4668 pruned. [I 2021-12-17 22:35:53,448] Trial 4669 pruned. [I 2021-12-17 22:35:59,605] Trial 4670 pruned. [I 2021-12-17 22:36:01,959] Trial 4671 pruned. [I 2021-12-17 22:36:04,627] Trial 4672 pruned. [I 2021-12-17 22:36:09,755] Trial 4673 pruned. [I 2021-12-17 22:36:10,865] Trial 4674 pruned. [I 2021-12-17 22:36:13,161] Trial 4675 pruned. [I 2021-12-17 22:36:18,119] Trial 4676 pruned. [I 2021-12-17 22:36:20,543] Trial 4677 pruned. [I 2021-12-17 22:36:21,127] Trial 4678 pruned. [I 2021-12-17 22:36:23,587] Trial 4679 pruned. [I 2021-12-17 22:36:27,026] Trial 4680 pruned. [I 2021-12-17 22:36:29,697] Trial 4681 pruned. [I 2021-12-17 22:36:32,763] Trial 4682 pruned. [I 2021-12-17 22:36:35,350] Trial 4683 pruned. [I 2021-12-17 22:36:38,673] Trial 4684 pruned. [I 2021-12-17 22:36:41,594] Trial 4685 pruned. [I 2021-12-17 22:36:49,752] Trial 4686 pruned. [I 2021-12-17 22:36:53,025] Trial 4687 pruned. [I 2021-12-17 22:36:55,991] Trial 4688 pruned. [I 2021-12-17 22:36:58,299] Trial 4689 pruned. [I 2021-12-17 22:37:01,202] Trial 4690 pruned. [I 2021-12-17 22:37:02,967] Trial 4691 pruned. [I 2021-12-17 22:37:05,480] Trial 4692 pruned. [I 2021-12-17 22:37:09,888] Trial 4693 pruned. [I 2021-12-17 22:37:14,022] Trial 4694 pruned. [I 2021-12-17 22:37:17,007] Trial 4695 pruned. [I 2021-12-17 22:37:20,013] Trial 4696 pruned. [I 2021-12-17 22:37:22,514] Trial 4697 pruned. [I 2021-12-17 22:37:23,508] Trial 4698 pruned. [I 2021-12-17 22:37:27,300] Trial 4699 pruned. [I 2021-12-17 22:37:29,766] Trial 4700 pruned. [I 2021-12-17 22:37:33,055] Trial 4701 pruned. [I 2021-12-17 22:37:33,904] Trial 4702 pruned. [I 2021-12-17 22:37:35,225] Trial 4703 pruned. [I 2021-12-17 22:37:43,248] Trial 4704 pruned. [I 2021-12-17 22:37:47,391] Trial 4705 pruned. [I 2021-12-17 22:37:50,036] Trial 4706 pruned. 
[I 2021-12-17 22:37:55,247] Trial 4707 pruned. [I 2021-12-17 22:37:59,314] Trial 4708 pruned. [I 2021-12-17 22:38:02,257] Trial 4709 pruned. [I 2021-12-17 22:38:06,235] Trial 4710 pruned. [I 2021-12-17 22:38:08,892] Trial 4711 pruned. [I 2021-12-17 22:38:13,341] Trial 4712 pruned. [I 2021-12-17 22:38:15,861] Trial 4713 pruned. [I 2021-12-17 22:38:18,144] Trial 4714 pruned. [I 2021-12-17 22:38:21,285] Trial 4715 pruned. [I 2021-12-17 22:38:22,574] Trial 4716 pruned. [I 2021-12-17 22:38:26,116] Trial 4717 pruned. [I 2021-12-17 22:38:28,312] Trial 4718 pruned. [I 2021-12-17 22:38:30,499] Trial 4719 pruned. [I 2021-12-17 22:38:33,456] Trial 4720 pruned. [I 2021-12-17 22:38:35,835] Trial 4721 pruned. [I 2021-12-17 22:38:39,385] Trial 4722 pruned. [I 2021-12-17 22:38:41,555] Trial 4723 pruned. [I 2021-12-17 22:38:42,526] Trial 4724 pruned. [I 2021-12-17 22:38:45,217] Trial 4725 pruned. [I 2021-12-17 22:38:46,070] Trial 4726 pruned. [I 2021-12-17 22:38:49,335] Trial 4727 pruned. [I 2021-12-17 22:38:56,523] Trial 4728 pruned. [I 2021-12-17 22:38:59,690] Trial 4729 pruned. [I 2021-12-17 22:39:02,474] Trial 4730 pruned. [I 2021-12-17 22:39:06,537] Trial 4731 pruned. [I 2021-12-17 22:39:08,971] Trial 4732 pruned. [I 2021-12-17 22:39:12,578] Trial 4733 pruned. [I 2021-12-17 22:39:15,374] Trial 4734 pruned. [I 2021-12-17 22:39:17,603] Trial 4735 pruned. [I 2021-12-17 22:39:19,467] Trial 4736 pruned. [I 2021-12-17 22:39:22,501] Trial 4737 pruned. [I 2021-12-17 22:39:25,245] Trial 4738 pruned. [I 2021-12-17 22:39:26,607] Trial 4739 pruned. [I 2021-12-17 22:39:32,332] Trial 4740 pruned. [I 2021-12-17 22:42:28,840] Trial 4741 finished with value: 176.04234313964844 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 444, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. 
Best is trial 3271 with value: 158.00619506835938. [I 2021-12-17 22:42:31,340] Trial 4742 pruned. [I 2021-12-17 22:42:32,775] Trial 4743 pruned. [I 2021-12-17 22:42:34,893] Trial 4744 pruned. [I 2021-12-17 22:42:36,974] Trial 4745 pruned. [I 2021-12-17 22:42:37,944] Trial 4746 pruned. [I 2021-12-17 22:42:39,955] Trial 4747 pruned. [I 2021-12-17 22:42:42,273] Trial 4748 pruned. [I 2021-12-17 22:42:47,777] Trial 4749 pruned. [I 2021-12-17 22:42:48,538] Trial 4750 pruned. [I 2021-12-17 22:42:56,945] Trial 4751 pruned. [I 2021-12-17 22:43:00,892] Trial 4752 pruned. [I 2021-12-17 22:43:04,249] Trial 4753 pruned. [I 2021-12-17 22:43:09,603] Trial 4754 pruned. [I 2021-12-17 22:43:12,637] Trial 4755 pruned. [I 2021-12-17 22:43:14,979] Trial 4756 pruned. [I 2021-12-17 22:43:17,488] Trial 4757 pruned. [I 2021-12-17 22:43:23,065] Trial 4758 pruned. [I 2021-12-17 22:43:24,741] Trial 4759 pruned. [I 2021-12-17 22:43:26,770] Trial 4760 pruned. [I 2021-12-17 22:43:30,921] Trial 4761 pruned. [I 2021-12-17 22:43:32,914] Trial 4762 pruned. [I 2021-12-17 22:43:34,241] Trial 4763 pruned. [I 2021-12-17 22:43:37,082] Trial 4764 pruned. [I 2021-12-17 22:43:41,413] Trial 4765 pruned. [I 2021-12-17 22:43:43,396] Trial 4766 pruned. [I 2021-12-17 22:43:49,706] Trial 4767 pruned. [I 2021-12-17 22:43:55,694] Trial 4768 pruned. [I 2021-12-17 22:43:57,813] Trial 4769 pruned. [I 2021-12-17 22:43:58,651] Trial 4770 pruned. [I 2021-12-17 22:44:01,793] Trial 4771 pruned. [I 2021-12-17 22:44:04,053] Trial 4772 pruned. [I 2021-12-17 22:44:10,974] Trial 4773 pruned. [I 2021-12-17 22:44:12,012] Trial 4774 pruned. [I 2021-12-17 22:44:14,005] Trial 4775 pruned. [I 2021-12-17 22:44:15,398] Trial 4776 pruned. [I 2021-12-17 22:44:19,589] Trial 4777 pruned. [I 2021-12-17 22:44:21,656] Trial 4778 pruned. [I 2021-12-17 22:44:25,508] Trial 4779 pruned. [I 2021-12-17 22:44:27,741] Trial 4780 pruned. [I 2021-12-17 22:44:29,650] Trial 4781 pruned. [I 2021-12-17 22:44:32,074] Trial 4782 pruned. 
[I 2021-12-17 22:44:34,196] Trial 4783 pruned. [I 2021-12-17 22:44:36,889] Trial 4784 pruned. [I 2021-12-17 22:44:39,089] Trial 4785 pruned. [I 2021-12-17 22:44:41,498] Trial 4786 pruned. [I 2021-12-17 22:44:43,103] Trial 4787 pruned. [I 2021-12-17 22:44:48,605] Trial 4788 pruned. [I 2021-12-17 22:44:51,756] Trial 4789 pruned. [I 2021-12-17 22:44:53,715] Trial 4790 pruned. [I 2021-12-17 22:44:59,254] Trial 4791 pruned. [I 2021-12-17 22:45:03,411] Trial 4792 pruned. [I 2021-12-17 22:45:05,415] Trial 4793 pruned. [I 2021-12-17 22:45:07,601] Trial 4794 pruned. [I 2021-12-17 22:45:08,740] Trial 4795 pruned. [I 2021-12-17 22:45:09,944] Trial 4796 pruned. [I 2021-12-17 22:45:13,762] Trial 4797 pruned. [I 2021-12-17 22:45:14,724] Trial 4798 pruned. [I 2021-12-17 22:45:20,143] Trial 4799 pruned. [I 2021-12-17 22:45:22,209] Trial 4800 pruned. [I 2021-12-17 22:45:25,834] Trial 4801 pruned. [I 2021-12-17 22:45:27,498] Trial 4802 pruned. [I 2021-12-17 22:45:29,443] Trial 4803 pruned. [I 2021-12-17 22:45:31,615] Trial 4804 pruned. [I 2021-12-17 22:45:32,793] Trial 4805 pruned. [I 2021-12-17 22:45:38,464] Trial 4806 pruned. [I 2021-12-17 22:45:42,898] Trial 4807 pruned. [I 2021-12-17 22:45:47,121] Trial 4808 pruned. [I 2021-12-17 22:45:48,420] Trial 4809 pruned. [I 2021-12-17 22:45:50,507] Trial 4810 pruned. [I 2021-12-17 22:45:51,716] Trial 4811 pruned. [I 2021-12-17 22:45:53,725] Trial 4812 pruned. [I 2021-12-17 22:45:59,786] Trial 4813 pruned. [I 2021-12-17 22:46:01,867] Trial 4814 pruned. [I 2021-12-17 22:46:04,997] Trial 4815 pruned. [I 2021-12-17 22:46:08,995] Trial 4816 pruned. [I 2021-12-17 22:46:11,069] Trial 4817 pruned. [I 2021-12-17 22:46:12,028] Trial 4818 pruned. [I 2021-12-17 22:46:15,311] Trial 4819 pruned. [I 2021-12-17 22:46:18,299] Trial 4820 pruned. [I 2021-12-17 22:46:20,253] Trial 4821 pruned. [I 2021-12-17 22:46:21,032] Trial 4822 pruned. [I 2021-12-17 22:46:23,309] Trial 4823 pruned. [I 2021-12-17 22:46:26,847] Trial 4824 pruned. 
[I 2021-12-17 22:46:28,301] Trial 4825 pruned. [I 2021-12-17 22:46:30,223] Trial 4826 pruned. [I 2021-12-17 22:46:32,424] Trial 4827 pruned. [I 2021-12-17 22:46:35,358] Trial 4828 pruned. [I 2021-12-17 22:46:43,098] Trial 4829 pruned. [I 2021-12-17 22:46:44,343] Trial 4830 pruned. [I 2021-12-17 22:46:47,342] Trial 4831 pruned. [I 2021-12-17 22:46:50,000] Trial 4832 pruned. [I 2021-12-17 22:46:52,083] Trial 4833 pruned. [I 2021-12-17 22:46:54,035] Trial 4834 pruned. [I 2021-12-17 22:46:55,411] Trial 4835 pruned. [I 2021-12-17 22:46:57,787] Trial 4836 pruned. [I 2021-12-17 22:46:59,880] Trial 4837 pruned. [I 2021-12-17 22:47:06,388] Trial 4838 pruned. [I 2021-12-17 22:49:46,178] Trial 4839 finished with value: 176.6631317138672 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 372, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 3271 with value: 158.00619506835938. [I 2021-12-17 22:49:49,109] Trial 4840 pruned. [I 2021-12-17 22:49:51,162] Trial 4841 pruned. [I 2021-12-17 22:49:53,284] Trial 4842 pruned. [I 2021-12-17 22:49:54,231] Trial 4843 pruned. [I 2021-12-17 22:49:56,421] Trial 4844 pruned. [I 2021-12-17 22:49:58,064] Trial 4845 pruned. [I 2021-12-17 22:50:01,584] Trial 4846 pruned. [I 2021-12-17 22:50:02,366] Trial 4847 pruned. [I 2021-12-17 22:52:41,036] Trial 4848 finished with value: 167.0767364501953 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 378, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 3271 with value: 158.00619506835938. [I 2021-12-17 22:52:43,314] Trial 4849 pruned. [I 2021-12-17 22:52:46,384] Trial 4850 pruned. 
[I 2021-12-17 22:52:48,091] Trial 4851 pruned. [I 2021-12-17 22:52:49,270] Trial 4852 pruned. [I 2021-12-17 22:52:52,510] Trial 4853 pruned. [I 2021-12-17 22:52:54,510] Trial 4854 pruned. [I 2021-12-17 22:53:00,065] Trial 4855 pruned. [I 2021-12-17 22:53:02,097] Trial 4856 pruned. [I 2021-12-17 22:53:04,483] Trial 4857 pruned. [I 2021-12-17 22:53:06,375] Trial 4858 pruned. [I 2021-12-17 22:53:08,582] Trial 4859 pruned. [I 2021-12-17 22:53:10,529] Trial 4860 pruned. [I 2021-12-17 22:53:12,512] Trial 4861 pruned. [I 2021-12-17 22:53:15,017] Trial 4862 pruned. [I 2021-12-17 22:53:17,510] Trial 4863 pruned. [I 2021-12-17 22:53:18,800] Trial 4864 pruned. [I 2021-12-17 22:53:21,114] Trial 4865 pruned. [I 2021-12-17 22:53:22,105] Trial 4866 pruned. [I 2021-12-17 22:53:23,988] Trial 4867 pruned. [I 2021-12-17 22:53:29,688] Trial 4868 pruned. [I 2021-12-17 22:53:33,328] Trial 4869 pruned. [I 2021-12-17 22:53:34,090] Trial 4870 pruned. [I 2021-12-17 22:53:36,865] Trial 4871 pruned. [I 2021-12-17 22:53:40,327] Trial 4872 pruned. [I 2021-12-17 22:53:43,431] Trial 4873 pruned. [I 2021-12-17 22:53:45,419] Trial 4874 pruned. [I 2021-12-17 22:53:46,520] Trial 4875 pruned. [I 2021-12-17 22:53:48,624] Trial 4876 pruned. [I 2021-12-17 22:53:50,735] Trial 4877 pruned. [I 2021-12-17 22:53:53,601] Trial 4878 pruned. [I 2021-12-17 22:56:43,914] Trial 4879 finished with value: 165.36447143554688 and parameters: {'batch_size': 16, 'n_hdn_layers': 3, 'neurons_HL1': 432, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 3271 with value: 158.00619506835938. [I 2021-12-17 22:56:47,288] Trial 4880 pruned. [I 2021-12-17 22:56:49,667] Trial 4881 pruned. [I 2021-12-17 22:56:55,556] Trial 4882 pruned. [I 2021-12-17 22:56:56,782] Trial 4883 pruned. [I 2021-12-17 22:56:58,844] Trial 4884 pruned. 
[I 2021-12-17 22:59:57,883] Trial 4885 finished with value: 164.8739013671875 and parameters: {'batch_size': 16, 'n_hdn_layers': 4, 'neurons_HL1': 454, 'out_channel': 128, 'kernel_size': 5, 'conv_activation': 'linear', 'dropout_prob': 0.1, 'mx_pl_size': 2, 'mx_pl_strides': 3, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 3271 with value: 158.00619506835938. [I 2021-12-17 23:00:00,250] Trial 4886 pruned. [I 2021-12-17 23:00:05,833] Trial 4887 pruned. [I 2021-12-17 23:00:11,969] Trial 4888 pruned. [I 2021-12-17 23:00:14,070] Trial 4889 pruned. [I 2021-12-17 23:00:16,113] Trial 4890 pruned. [I 2021-12-17 23:00:18,124] Trial 4891 pruned. [I 2021-12-17 23:00:19,439] Trial 4892 pruned. [I 2021-12-17 23:00:21,705] Trial 4893 pruned. [I 2021-12-17 23:00:23,786] Trial 4894 pruned. [I 2021-12-17 23:00:24,570] Trial 4895 pruned. [I 2021-12-17 23:00:26,598] Trial 4896 pruned. [I 2021-12-17 23:00:32,658] Trial 4897 pruned. [I 2021-12-17 23:00:34,023] Trial 4898 pruned. [I 2021-12-17 23:00:35,906] Trial 4899 pruned. [I 2021-12-17 23:00:38,980] Trial 4900 pruned. [I 2021-12-17 23:00:41,550] Trial 4901 pruned. [I 2021-12-17 23:00:48,609] Trial 4902 pruned. [I 2021-12-17 23:00:50,439] Trial 4903 pruned. [I 2021-12-17 23:00:52,471] Trial 4904 pruned. [I 2021-12-17 23:00:54,617] Trial 4905 pruned. [I 2021-12-17 23:00:58,533] Trial 4906 pruned. [I 2021-12-17 23:01:01,481] Trial 4907 pruned. [I 2021-12-17 23:01:03,480] Trial 4908 pruned. [I 2021-12-17 23:01:05,110] Trial 4909 pruned. [I 2021-12-17 23:01:07,524] Trial 4910 pruned. [I 2021-12-17 23:01:09,478] Trial 4911 pruned. [I 2021-12-17 23:01:11,599] Trial 4912 pruned. [I 2021-12-17 23:01:13,164] Trial 4913 pruned. [I 2021-12-17 23:01:14,117] Trial 4914 pruned. [I 2021-12-17 23:01:15,588] Trial 4915 pruned. [I 2021-12-17 23:01:19,449] Trial 4916 pruned. [I 2021-12-17 23:01:20,664] Trial 4917 pruned. [I 2021-12-17 23:01:21,561] Trial 4918 pruned. 
[I 2021-12-17 23:01:23,834] Trial 4919 pruned. [I 2021-12-17 23:01:24,960] Trial 4920 pruned. [I 2021-12-17 23:01:28,110] Trial 4921 pruned. [I 2021-12-17 23:01:29,946] Trial 4922 pruned. [I 2021-12-17 23:01:32,820] Trial 4923 pruned. [I 2021-12-17 23:01:34,929] Trial 4924 pruned. [I 2021-12-17 23:01:38,342] Trial 4925 pruned. [I 2021-12-17 23:01:40,712] Trial 4926 pruned. [I 2021-12-17 23:01:42,646] Trial 4927 pruned. [I 2021-12-17 23:01:49,088] Trial 4928 pruned. [I 2021-12-17 23:01:51,904] Trial 4929 pruned. [I 2021-12-17 23:01:55,597] Trial 4930 pruned. [I 2021-12-17 23:01:56,910] Trial 4931 pruned. [I 2021-12-17 23:02:00,792] Trial 4932 pruned. [I 2021-12-17 23:02:03,335] Trial 4933 pruned. [I 2021-12-17 23:02:05,531] Trial 4934 pruned. [I 2021-12-17 23:02:09,097] Trial 4935 pruned. [I 2021-12-17 23:02:11,405] Trial 4936 pruned. [I 2021-12-17 23:02:14,688] Trial 4937 pruned. [I 2021-12-17 23:02:15,540] Trial 4938 pruned. [I 2021-12-17 23:02:19,165] Trial 4939 pruned. [I 2021-12-17 23:02:21,189] Trial 4940 pruned. [I 2021-12-17 23:02:22,426] Trial 4941 pruned. [I 2021-12-17 23:02:23,350] Trial 4942 pruned. [I 2021-12-17 23:02:25,411] Trial 4943 pruned. [I 2021-12-17 23:02:33,803] Trial 4944 pruned. [I 2021-12-17 23:02:35,133] Trial 4945 pruned. [I 2021-12-17 23:02:36,760] Trial 4946 pruned. [I 2021-12-17 23:02:39,264] Trial 4947 pruned. [I 2021-12-17 23:02:41,831] Trial 4948 pruned. [I 2021-12-17 23:02:47,903] Trial 4949 pruned. [I 2021-12-17 23:02:50,082] Trial 4950 pruned. [I 2021-12-17 23:02:52,186] Trial 4951 pruned. [I 2021-12-17 23:02:57,540] Trial 4952 pruned. [I 2021-12-17 23:03:00,742] Trial 4953 pruned. [I 2021-12-17 23:03:02,208] Trial 4954 pruned. [I 2021-12-17 23:03:05,037] Trial 4955 pruned. [I 2021-12-17 23:03:11,753] Trial 4956 pruned. [I 2021-12-17 23:03:13,900] Trial 4957 pruned. [I 2021-12-17 23:03:15,124] Trial 4958 pruned. [I 2021-12-17 23:03:17,069] Trial 4959 pruned. [I 2021-12-17 23:03:21,703] Trial 4960 pruned. 
[I 2021-12-17 23:03:23,012] Trial 4961 pruned. [I 2021-12-17 23:03:25,200] Trial 4962 pruned. [I 2021-12-17 23:03:26,157] Trial 4963 pruned. [I 2021-12-17 23:03:28,212] Trial 4964 pruned. [I 2021-12-17 23:03:30,089] Trial 4965 pruned. [I 2021-12-17 23:03:30,988] Trial 4966 pruned. [I 2021-12-17 23:03:32,951] Trial 4967 pruned. [I 2021-12-17 23:03:35,283] Trial 4968 pruned. [I 2021-12-17 23:03:37,407] Trial 4969 pruned. [I 2021-12-17 23:03:39,193] Trial 4970 pruned. [I 2021-12-17 23:03:41,203] Trial 4971 pruned. [I 2021-12-17 23:03:47,340] Trial 4972 pruned. [I 2021-12-17 23:03:49,007] Trial 4973 pruned. [I 2021-12-17 23:03:54,881] Trial 4974 pruned. [I 2021-12-17 23:03:58,639] Trial 4975 pruned. [I 2021-12-17 23:04:00,530] Trial 4976 pruned. [I 2021-12-17 23:04:02,404] Trial 4977 pruned. [I 2021-12-17 23:04:04,963] Trial 4978 pruned. [I 2021-12-17 23:04:06,339] Trial 4979 pruned. [I 2021-12-17 23:04:16,621] Trial 4980 pruned. [I 2021-12-17 23:04:20,371] Trial 4981 pruned. [I 2021-12-17 23:04:23,196] Trial 4982 pruned. [I 2021-12-17 23:04:24,440] Trial 4983 pruned. [I 2021-12-17 23:04:26,711] Trial 4984 pruned. [I 2021-12-17 23:04:28,768] Trial 4985 pruned. [I 2021-12-17 23:04:32,231] Trial 4986 pruned. [I 2021-12-17 23:04:33,033] Trial 4987 pruned. [I 2021-12-17 23:04:34,482] Trial 4988 pruned. [I 2021-12-17 23:04:38,054] Trial 4989 pruned. [I 2021-12-17 23:04:38,924] Trial 4990 pruned. [I 2021-12-17 23:04:41,498] Trial 4991 pruned. [I 2021-12-17 23:04:43,302] Trial 4992 pruned. [I 2021-12-17 23:04:45,711] Trial 4993 pruned. [I 2021-12-17 23:04:47,918] Trial 4994 pruned. [I 2021-12-17 23:04:51,170] Trial 4995 pruned. [I 2021-12-17 23:04:53,413] Trial 4996 pruned. [I 2021-12-17 23:04:57,058] Trial 4997 pruned. [I 2021-12-17 23:05:02,816] Trial 4998 pruned. [I 2021-12-17 23:05:04,062] Trial 4999 pruned.
Wall time: 11h 28min 4s
# Flatten the finished Optuna study into a pandas DataFrame:
# one row per trial with its value, timings, sampled hyperparameters, and state.
trials_df = study.trials_dataframe()
# Bare expression -> notebook renders the DataFrame inline.
trials_df
| number | value | datetime_start | datetime_complete | duration | params_HL0_ac_fn | params_HL1_ac_fn | params_HL2_ac_fn | params_HL3_ac_fn | params_HL4_ac_fn | params_batch_size | params_conv_activation | params_dropout_prob | params_kernel_size | params_mx_pl_size | params_mx_pl_strides | params_n_hdn_layers | params_neurons_HL1 | params_out_channel | state | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0 | 378.089020 | 2021-12-17 11:36:59.415294 | 2021-12-17 11:37:39.233786 | 0 days 00:00:39.818492 | relu | linear | NaN | NaN | NaN | 32 | relu | 0.50 | 5 | 4 | 3 | 2 | 160 | 64 | COMPLETE |
| 1 | 1 | 236.322586 | 2021-12-17 11:37:39.234783 | 2021-12-17 11:39:33.704597 | 0 days 00:01:54.469814 | linear | linear | linear | relu | relu | 32 | relu | 0.10 | 3 | 5 | 2 | 5 | 990 | 32 | COMPLETE |
| 2 | 2 | 334.690796 | 2021-12-17 11:39:33.705595 | 2021-12-17 11:40:05.581333 | 0 days 00:00:31.875738 | relu | linear | NaN | NaN | NaN | 128 | relu | 0.25 | 3 | 2 | 3 | 2 | 1012 | 32 | COMPLETE |
| 3 | 3 | 183.645325 | 2021-12-17 11:40:05.582330 | 2021-12-17 11:40:53.602885 | 0 days 00:00:48.020555 | relu | linear | relu | NaN | NaN | 32 | linear | 0.10 | 5 | 3 | 1 | 3 | 28 | 128 | COMPLETE |
| 4 | 4 | 193.881012 | 2021-12-17 11:40:53.603881 | 2021-12-17 11:41:21.161170 | 0 days 00:00:27.557289 | linear | relu | NaN | NaN | NaN | 128 | linear | 0.20 | 4 | 2 | 1 | 2 | 524 | 32 | COMPLETE |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 4995 | 4995 | 352.493896 | 2021-12-17 23:04:47.919181 | 2021-12-17 23:04:51.170484 | 0 days 00:00:03.251303 | relu | linear | relu | linear | NaN | 16 | linear | 0.10 | 5 | 3 | 3 | 4 | 828 | 128 | PRUNED |
| 4996 | 4996 | 248.742035 | 2021-12-17 23:04:51.171482 | 2021-12-17 23:04:53.413485 | 0 days 00:00:02.242003 | relu | linear | relu | linear | NaN | 16 | linear | 0.10 | 4 | 2 | 3 | 4 | 468 | 128 | PRUNED |
| 4997 | 4997 | 222.198105 | 2021-12-17 23:04:53.414482 | 2021-12-17 23:04:57.058734 | 0 days 00:00:03.644252 | relu | linear | relu | linear | NaN | 16 | linear | 0.10 | 5 | 2 | 3 | 4 | 370 | 128 | PRUNED |
| 4998 | 4998 | 220.144577 | 2021-12-17 23:04:57.059732 | 2021-12-17 23:05:02.816334 | 0 days 00:00:05.756602 | relu | linear | relu | linear | NaN | 16 | linear | 0.10 | 5 | 2 | 3 | 4 | 726 | 64 | PRUNED |
| 4999 | 4999 | 315.353149 | 2021-12-17 23:05:02.817331 | 2021-12-17 23:05:04.062999 | 0 days 00:00:01.245668 | relu | linear | relu | linear | NaN | 16 | linear | 0.10 | 5 | 2 | 3 | 4 | 94 | 128 | PRUNED |
5000 rows × 20 columns
# Persist the per-trial results table for offline analysis.
trials_df.to_pickle(f"cnn_trials/{study_name}_df.pkl")
# Persist the whole study object so the search can be resumed or reloaded later.
joblib.dump(study, f"cnn_trials/{study_name}_study.pkl")
['cnn_trials/cnn_45_scaled_one_conv_lyr_study.pkl']
# Parallel-coordinate views of two slices of the search space,
# each written to a JPEG and shown inline.
for param_group, suffix in (
    (['out_channel', 'kernel_size', 'conv_activation'], 'cnn_portion_plot.jpeg'),
    (['dropout_prob', 'mx_pl_size', 'mx_pl_strides'], 'dropout_mx_pl_portion_plot.jpeg'),
):
    graph = optuna.visualization.plot_parallel_coordinate(study, params=param_group)
    graph.write_image("cnn_trials/" + study_name + suffix)
    graph.show()
# Interactive overview plots (rendered inline by the notebook).
optuna.visualization.plot_optimization_history(study)
optuna.visualization.plot_intermediate_values(study)
# Reload the persisted study so analysis can continue without re-running
# the 11.5-hour search.
study = joblib.load("cnn_trials/cnn_45_scaled_one_conv_lyr_study.pkl")
best = study.best_trial
print("Best trial until now:")
print(" Value: ", best.value)
print(" Params: ")
for key, value in best.params.items():
    print(f" {key}: {value}")
Best trial until now:
Value: 158.00619506835938
Params:
batch_size: 16
n_hdn_layers: 4
neurons_HL1: 714
out_channel: 128
kernel_size: 5
conv_activation: linear
dropout_prob: 0.1
mx_pl_size: 2
mx_pl_strides: 3
HL0_ac_fn: relu
HL1_ac_fn: linear
HL2_ac_fn: relu
HL3_ac_fn: linear
%%time
torch.manual_seed(42)
torch.cuda.manual_seed(42)
np.random.seed(42)
random.seed(42)
mape_train = []
mape_scores = []
rmse_train = []
rmse_scores = []
mae_train = []
mae_scores = []
r2_train = []
r2_scores = []
n_epochs = 1000
# src: https://stackoverflow.com/questions/45113245/how-to-get-mini-batches-in-pytorch-in-a-clean-and-efficient-way
def tuned_model_investigation(trial):
    """Re-run the best Optuna trial and record detailed train/test scores.

    Rebuilds the 1D-CNN + MLP architecture encoded in ``trial``'s
    parameters (``FrozenTrial.suggest_*`` replays the stored values),
    trains it for ``n_epochs`` epochs, evaluates on the test set after
    every epoch, and pickles the *trained* model under ``cnn_trials/``.

    Parameters
    ----------
    trial : optuna.trial.FrozenTrial
        The study's best trial, whose params define the architecture.

    Returns
    -------
    tuple of list
        ``(mae_scores, mape_scores, rmse_scores, r2_scores)`` — per-epoch
        test-set metric histories. Per-batch training metrics are appended
        to the module-level ``*_train`` lists as a side effect.

    Notes
    -----
    Relies on notebook globals: ``X_scaled_train``, ``Y_train``,
    ``X_scaled_test``, ``Y_test``, ``study_name``, ``n_epochs`` and the
    metric-accumulator lists.
    """
    print("Best Study Parameters:")
    for key, value in trial.params.items():
        print(f"    {key}: {value}")
    # set up GPU if available.
    device = "cpu"
    if torch.cuda.is_available():
        device = "cuda:0"
    device = torch.device(device)
    # Replay the tuned hyper-parameters from the frozen trial.
    batch_size = trial.suggest_categorical("batch_size", [16,32,64,128])
    n_layers = trial.suggest_int("n_hdn_layers", 2, 5)
    max_nrns = trial.suggest_int("neurons_HL1", 2, 1024, step=2)
    # now for the 1D convolution parameters
    out_channel = trial.suggest_categorical("out_channel", [32, 64, 128])
    kernel_size = trial.suggest_categorical("kernel_size", [3,4,5])
    conv_activation = trial.suggest_categorical("conv_activation", ['relu', 'linear'])
    # now for the dropout and max pool 1D parameters
    dropout_prob = trial.suggest_categorical("dropout_prob", [0.1, 0.2, 0.25, 0.5])
    mx_pl_size = trial.suggest_categorical("mx_pl_size", [2,3,4,5])
    mx_pl_strides = trial.suggest_categorical("mx_pl_strides", [1,2,3])
    # Assemble the network: reshape the 45-feature row into a 1-channel
    # sequence, convolve, regularize, pool, then a tuned MLP head.
    layers = []
    layers.append(torch.nn.Unflatten(dim=1, unflattened_size=(1,45)))
    layers.append(torch.nn.Conv1d(1, out_channel, kernel_size))
    if conv_activation == 'relu':
        layers.append(torch.nn.ReLU())
    layers.append(torch.nn.Dropout(dropout_prob))
    layers.append(torch.nn.MaxPool1d(mx_pl_size, stride=mx_pl_strides))
    layers.append(torch.nn.Flatten())
    # now we calculate the output size of MaxPool1D
    # (conv output length = 45 - kernel_size + 1, then the pooling formula).
    in_features = math.floor(((math.floor(45 - kernel_size + 1) - mx_pl_size) / mx_pl_strides) + 1)
    in_features = in_features*out_channel
    out_features = 0
    # Hidden layers: widths halve each layer, optional ReLU per layer.
    for i in range(n_layers):
        out_features = int(max_nrns)
        layers.append(torch.nn.Linear(in_features, out_features))
        activation = trial.suggest_categorical(f"HL{i}_ac_fn", ["relu", "linear"])
        if activation == "relu":
            layers.append(torch.nn.ReLU())
        in_features = out_features
        # to prevent the last layer being Linear(0,1)
        if max_nrns > 2:
            max_nrns = max_nrns/2
    layers.append(torch.nn.Linear(out_features, 1))
    cnn_model = torch.nn.Sequential(*layers).to(device)
    print(cnn_model)
    # use MAE as loss function (called L1Loss).
    loss_fn = nn.L1Loss()
    optimizer = optim.Adam(cnn_model.parameters())
    for epoch in range(n_epochs):
        # now we train the model:
        cnn_model.train()
        # we create a random permutation of numbers from X_scaled_train.size()
        permutation = torch.randperm(X_scaled_train.size()[0])
        # we go through the batches.
        for i in range(0, X_scaled_train.size()[0], batch_size):
            indices = permutation[i:i+batch_size]
            X_train_batch, Y_train_batch = X_scaled_train[indices], Y_train[indices]
            # input X_scaled_train into cnn and get predictions.
            train_prediction = cnn_model(X_train_batch.to(device))
            train_loss = loss_fn(train_prediction, Y_train_batch.to(device))
            train_mape = MAPE_pytorch(Y_train_batch.to(
                'cpu'), train_prediction.to('cpu'))
            train_rmse = RMSE_pytorch(Y_train_batch.to(
                'cpu'), train_prediction.to('cpu'))
            train_r2 = R2_pytorch(Y_train_batch.to(
                'cpu'), train_prediction.to('cpu'))
            optimizer.zero_grad()
            # backpropagation
            train_loss.backward()
            optimizer.step()
            mae_train.append(train_loss.item())
            mape_train.append(train_mape.item())
            rmse_train.append(train_rmse.item())
            r2_train.append(train_r2.item())
        # now we test the model:
        cnn_model.eval()
        # no_grad: evaluation needs no autograd graph — saves memory/time
        # over the full test set; printed values are unchanged.
        with torch.no_grad():
            test_prediction = cnn_model(X_scaled_test.to(device))
            test_loss = loss_fn(test_prediction, Y_test.to(device))
            test_mape = MAPE_pytorch(Y_test.to('cpu'), test_prediction.to('cpu'))
            test_rmse = RMSE_pytorch(Y_test.to('cpu'), test_prediction.to('cpu'))
            test_r2 = R2_pytorch(Y_test.to('cpu'), test_prediction.to('cpu'))
        mae_scores.append(test_loss.item())
        mape_scores.append(test_mape.item())
        rmse_scores.append(test_rmse.item())
        r2_scores.append(test_r2.item())
        print(f"Epoch{epoch+1}\ttrain_loss={train_loss};\ttest_loss={test_loss}")
    # Save the model AFTER training (the original pickled it before the
    # training loop, so the stored artifact was an untrained network).
    with open('cnn_trials/' + study_name + '_trial{}.pickle'.format(trial.number), 'wb') as fout:
        pickle.dump(cnn_model, fout)
    # we return the thing we are trying to maximize or minimize (the MAE - our loss fn)
    return mae_scores, mape_scores, rmse_scores, r2_scores
# Train/evaluate the best architecture and summarize mean +/- std of every
# metric (train lists are per-batch, test lists are per-epoch).
mae_scores, mape_scores, rmse_scores, r2_scores = tuned_model_investigation(
    study.best_trial)
print("\n\n------------------------------------------- TRAINING SCORES -------------------------------------------")
print(f"Overall MAE: {np.mean(mae_train)} +/- {np.std(mae_train)}")
print(f"Overall RMSE: {np.mean(rmse_train)} +/- {np.std(rmse_train)}")
print(f"Overall MAPE: {np.mean(mape_train)} +/- {np.std(mape_train)}")
print(f"Overall R2: {np.mean(r2_train)*100} +/- {np.std(r2_train)*100}")
print("\n\n------------------------------------------- TESTING SCORES -------------------------------------------")
print(f"Overall MAE: {np.mean(mae_scores)} +/- {np.std(mae_scores)}")
print(f"Overall RMSE: {np.mean(rmse_scores)} +/- {np.std(rmse_scores)}")
print(f"Overall MAPE: {np.mean(mape_scores)} +/- {np.std(mape_scores)}")
print(f"Overall R2: {np.mean(r2_scores)*100} +/- {np.std(r2_scores)*100}\n\n")
Best Study Parameters:
batch_size: 16
n_hdn_layers: 4
neurons_HL1: 714
out_channel: 128
kernel_size: 5
conv_activation: linear
dropout_prob: 0.1
mx_pl_size: 2
mx_pl_strides: 3
HL0_ac_fn: relu
HL1_ac_fn: linear
HL2_ac_fn: relu
HL3_ac_fn: linear
Sequential(
(0): Unflatten(dim=1, unflattened_size=(1, 45))
(1): Conv1d(1, 128, kernel_size=(5,), stride=(1,))
(2): Dropout(p=0.1, inplace=False)
(3): MaxPool1d(kernel_size=2, stride=3, padding=0, dilation=1, ceil_mode=False)
(4): Flatten(start_dim=1, end_dim=-1)
(5): Linear(in_features=1792, out_features=714, bias=True)
(6): ReLU()
(7): Linear(in_features=714, out_features=357, bias=True)
(8): Linear(in_features=357, out_features=178, bias=True)
(9): ReLU()
(10): Linear(in_features=178, out_features=89, bias=True)
(11): Linear(in_features=89, out_features=1, bias=True)
)
Epoch1 train_loss=290.9295654296875; test_loss=264.4689636230469
Epoch2 train_loss=116.10122680664062; test_loss=221.08963012695312
Epoch3 train_loss=366.896728515625; test_loss=214.2647247314453
Epoch4 train_loss=153.6276092529297; test_loss=221.29481506347656
Epoch5 train_loss=295.6541442871094; test_loss=218.2425994873047
Epoch6 train_loss=462.465576171875; test_loss=190.7745361328125
Epoch7 train_loss=293.32769775390625; test_loss=217.14053344726562
Epoch8 train_loss=50.60541915893555; test_loss=198.54263305664062
Epoch9 train_loss=93.45606231689453; test_loss=221.6156463623047
Epoch10 train_loss=49.198604583740234; test_loss=206.65525817871094
Epoch11 train_loss=127.64714050292969; test_loss=202.03724670410156
Epoch12 train_loss=288.8114013671875; test_loss=186.8195343017578
Epoch13 train_loss=277.60479736328125; test_loss=194.42083740234375
Epoch14 train_loss=248.00228881835938; test_loss=201.2762908935547
Epoch15 train_loss=195.52777099609375; test_loss=183.5404052734375
Epoch16 train_loss=619.459716796875; test_loss=183.6191864013672
Epoch17 train_loss=136.78871154785156; test_loss=175.9234619140625
Epoch18 train_loss=202.06582641601562; test_loss=183.02769470214844
Epoch19 train_loss=46.63916778564453; test_loss=185.93809509277344
Epoch20 train_loss=195.2716064453125; test_loss=182.32191467285156
Epoch21 train_loss=167.74066162109375; test_loss=174.74017333984375
Epoch22 train_loss=120.59965515136719; test_loss=198.19949340820312
Epoch23 train_loss=102.02284240722656; test_loss=177.01141357421875
Epoch24 train_loss=66.55912780761719; test_loss=177.7340545654297
Epoch25 train_loss=93.68882751464844; test_loss=182.14500427246094
Epoch26 train_loss=128.3892059326172; test_loss=203.14935302734375
Epoch27 train_loss=63.189483642578125; test_loss=181.7900390625
Epoch28 train_loss=55.742347717285156; test_loss=200.2317352294922
Epoch29 train_loss=225.35037231445312; test_loss=210.78939819335938
Epoch30 train_loss=202.51971435546875; test_loss=172.7822265625
Epoch31 train_loss=155.1397705078125; test_loss=183.54986572265625
Epoch32 train_loss=118.56977844238281; test_loss=206.71383666992188
Epoch33 train_loss=290.935546875; test_loss=196.05780029296875
Epoch34 train_loss=202.2840576171875; test_loss=174.84429931640625
Epoch35 train_loss=80.23258972167969; test_loss=171.09909057617188
Epoch36 train_loss=134.9010467529297; test_loss=199.74972534179688
Epoch37 train_loss=117.81919860839844; test_loss=177.15185546875
Epoch38 train_loss=39.594730377197266; test_loss=177.1085968017578
Epoch39 train_loss=159.92596435546875; test_loss=172.5272674560547
Epoch40 train_loss=377.0839538574219; test_loss=174.20089721679688
Epoch41 train_loss=134.07090759277344; test_loss=171.73825073242188
Epoch42 train_loss=59.053977966308594; test_loss=187.55921936035156
Epoch43 train_loss=201.49551391601562; test_loss=214.5355987548828
Epoch44 train_loss=97.16355895996094; test_loss=175.29689025878906
Epoch45 train_loss=65.73574829101562; test_loss=182.33432006835938
Epoch46 train_loss=93.44949340820312; test_loss=171.37501525878906
Epoch47 train_loss=41.08422088623047; test_loss=169.11660766601562
Epoch48 train_loss=158.87985229492188; test_loss=169.61231994628906
Epoch49 train_loss=149.05035400390625; test_loss=170.1109619140625
Epoch50 train_loss=157.15293884277344; test_loss=206.92788696289062
Epoch51 train_loss=141.11544799804688; test_loss=177.67970275878906
Epoch52 train_loss=124.99960327148438; test_loss=173.96363830566406
Epoch53 train_loss=84.21087646484375; test_loss=165.70187377929688
Epoch54 train_loss=145.54798889160156; test_loss=167.3758544921875
Epoch55 train_loss=311.67608642578125; test_loss=175.76463317871094
Epoch56 train_loss=153.96197509765625; test_loss=187.38670349121094
Epoch57 train_loss=135.63088989257812; test_loss=194.7633514404297
Epoch58 train_loss=182.60003662109375; test_loss=170.79017639160156
Epoch59 train_loss=84.61817932128906; test_loss=171.19720458984375
Epoch60 train_loss=166.8036651611328; test_loss=169.00686645507812
Epoch61 train_loss=88.00772094726562; test_loss=165.5081024169922
Epoch62 train_loss=150.74838256835938; test_loss=171.13897705078125
Epoch63 train_loss=172.20028686523438; test_loss=182.8035125732422
Epoch64 train_loss=58.75439453125; test_loss=180.3388214111328
Epoch65 train_loss=90.25057220458984; test_loss=164.5250244140625
Epoch66 train_loss=107.04623413085938; test_loss=165.0173797607422
Epoch67 train_loss=114.81871795654297; test_loss=174.67918395996094
Epoch68 train_loss=128.964111328125; test_loss=179.9058074951172
Epoch69 train_loss=152.63638305664062; test_loss=164.9073486328125
Epoch70 train_loss=638.1132202148438; test_loss=167.7833709716797
Epoch71 train_loss=431.02557373046875; test_loss=167.25807189941406
Epoch72 train_loss=54.51765441894531; test_loss=166.26116943359375
Epoch73 train_loss=204.30935668945312; test_loss=172.09861755371094
Epoch74 train_loss=210.50961303710938; test_loss=190.6043701171875
Epoch75 train_loss=143.07666015625; test_loss=171.51083374023438
Epoch76 train_loss=78.08426666259766; test_loss=162.56365966796875
Epoch77 train_loss=74.69149780273438; test_loss=163.27491760253906
Epoch78 train_loss=59.15687942504883; test_loss=170.1017303466797
Epoch79 train_loss=33.30712890625; test_loss=173.9085235595703
Epoch80 train_loss=90.52943420410156; test_loss=169.85690307617188
Epoch81 train_loss=257.80035400390625; test_loss=166.24398803710938
Epoch82 train_loss=162.74859619140625; test_loss=175.18460083007812
Epoch83 train_loss=130.36880493164062; test_loss=166.8652801513672
Epoch84 train_loss=138.11032104492188; test_loss=169.28208923339844
Epoch85 train_loss=81.07342529296875; test_loss=165.55441284179688
Epoch86 train_loss=153.65158081054688; test_loss=168.24761962890625
Epoch87 train_loss=163.3715057373047; test_loss=167.07369995117188
Epoch88 train_loss=73.44047546386719; test_loss=166.88372802734375
Epoch89 train_loss=141.0819091796875; test_loss=164.65985107421875
Epoch90 train_loss=224.21820068359375; test_loss=169.6827850341797
Epoch91 train_loss=64.16302490234375; test_loss=164.37744140625
Epoch92 train_loss=103.17571258544922; test_loss=163.0834503173828
Epoch93 train_loss=146.0531005859375; test_loss=185.23794555664062
Epoch94 train_loss=121.82118225097656; test_loss=165.230224609375
Epoch95 train_loss=84.47964477539062; test_loss=163.92428588867188
Epoch96 train_loss=143.49462890625; test_loss=166.6798095703125
Epoch97 train_loss=92.10292053222656; test_loss=162.78419494628906
Epoch98 train_loss=302.1296081542969; test_loss=164.77127075195312
Epoch99 train_loss=485.081298828125; test_loss=164.7592315673828
Epoch100 train_loss=184.67156982421875; test_loss=180.20419311523438
Epoch101 train_loss=204.49826049804688; test_loss=190.5532989501953
Epoch102 train_loss=61.803192138671875; test_loss=160.07284545898438
Epoch103 train_loss=120.80375671386719; test_loss=162.5617218017578
Epoch104 train_loss=74.41122436523438; test_loss=170.65496826171875
Epoch105 train_loss=454.76336669921875; test_loss=172.3622283935547
Epoch106 train_loss=96.92913055419922; test_loss=164.7119140625
Epoch107 train_loss=87.48768615722656; test_loss=180.22055053710938
Epoch108 train_loss=153.2032012939453; test_loss=165.6590118408203
Epoch109 train_loss=82.49879455566406; test_loss=168.74826049804688
Epoch110 train_loss=263.1418762207031; test_loss=163.97555541992188
Epoch111 train_loss=250.20489501953125; test_loss=167.2223358154297
Epoch112 train_loss=349.3670959472656; test_loss=173.1105499267578
Epoch113 train_loss=200.150634765625; test_loss=168.29220581054688
Epoch114 train_loss=223.35997009277344; test_loss=169.8140411376953
Epoch115 train_loss=107.36114501953125; test_loss=171.22390747070312
Epoch116 train_loss=378.6036682128906; test_loss=168.2406463623047
Epoch117 train_loss=107.5919189453125; test_loss=183.09027099609375
Epoch118 train_loss=361.9572448730469; test_loss=163.2520294189453
Epoch119 train_loss=142.1504364013672; test_loss=167.11268615722656
Epoch120 train_loss=35.421669006347656; test_loss=174.31427001953125
Epoch121 train_loss=146.2100830078125; test_loss=162.89138793945312
Epoch122 train_loss=163.7344970703125; test_loss=164.6154327392578
Epoch123 train_loss=142.90386962890625; test_loss=170.9564666748047
Epoch124 train_loss=96.83019256591797; test_loss=179.349853515625
Epoch125 train_loss=286.45343017578125; test_loss=165.80532836914062
Epoch126 train_loss=203.36036682128906; test_loss=166.63722229003906
Epoch127 train_loss=303.12109375; test_loss=181.1637725830078
Epoch128 train_loss=221.11676025390625; test_loss=168.5399932861328
Epoch129 train_loss=345.0054931640625; test_loss=181.3080596923828
Epoch130 train_loss=242.55474853515625; test_loss=165.50076293945312
Epoch131 train_loss=195.65692138671875; test_loss=163.82440185546875
Epoch132 train_loss=81.52851104736328; test_loss=186.48504638671875
Epoch133 train_loss=381.33978271484375; test_loss=169.01402282714844
Epoch134 train_loss=124.3758773803711; test_loss=166.37428283691406
Epoch135 train_loss=190.80953979492188; test_loss=164.7088165283203
Epoch136 train_loss=64.52540588378906; test_loss=168.22418212890625
Epoch137 train_loss=211.31256103515625; test_loss=160.81398010253906
Epoch138 train_loss=163.90074157714844; test_loss=165.04237365722656
Epoch139 train_loss=269.791748046875; test_loss=166.6909942626953
Epoch140 train_loss=41.16020202636719; test_loss=166.3368682861328
Epoch141 train_loss=153.02294921875; test_loss=165.24258422851562
Epoch142 train_loss=157.42649841308594; test_loss=159.3066864013672
Epoch143 train_loss=67.55802154541016; test_loss=166.42344665527344
Epoch144 train_loss=364.6539306640625; test_loss=162.77786254882812
Epoch145 train_loss=183.84097290039062; test_loss=164.91738891601562
Epoch146 train_loss=49.773834228515625; test_loss=162.9069061279297
Epoch147 train_loss=240.33328247070312; test_loss=164.68275451660156
Epoch148 train_loss=76.01213073730469; test_loss=165.98854064941406
Epoch149 train_loss=57.75843811035156; test_loss=167.70611572265625
Epoch150 train_loss=213.46746826171875; test_loss=172.6592559814453
Epoch151 train_loss=71.63129425048828; test_loss=163.7088165283203
Epoch152 train_loss=76.28173828125; test_loss=168.36761474609375
Epoch153 train_loss=241.4484405517578; test_loss=165.73583984375
Epoch154 train_loss=148.0570831298828; test_loss=169.0037078857422
Epoch155 train_loss=50.21533966064453; test_loss=162.1598358154297
Epoch156 train_loss=134.4282989501953; test_loss=171.50491333007812
Epoch157 train_loss=148.53646850585938; test_loss=175.2638702392578
Epoch158 train_loss=65.18512725830078; test_loss=162.68829345703125
Epoch159 train_loss=82.94412994384766; test_loss=164.93988037109375
Epoch160 train_loss=81.54330444335938; test_loss=166.43899536132812
Epoch161 train_loss=110.59577941894531; test_loss=169.301513671875
Epoch162 train_loss=53.461669921875; test_loss=170.59228515625
Epoch163 train_loss=136.16232299804688; test_loss=166.67112731933594
Epoch164 train_loss=158.259033203125; test_loss=164.28872680664062
Epoch165 train_loss=195.00672912597656; test_loss=164.3240203857422
Epoch166 train_loss=69.14166259765625; test_loss=162.94342041015625
Epoch167 train_loss=94.21288299560547; test_loss=160.99874877929688
Epoch168 train_loss=151.5680694580078; test_loss=161.85841369628906
Epoch169 train_loss=99.44414520263672; test_loss=162.7458953857422
Epoch170 train_loss=63.412315368652344; test_loss=162.6697540283203
Epoch171 train_loss=164.94110107421875; test_loss=172.0388946533203
Epoch172 train_loss=231.47506713867188; test_loss=166.08135986328125
Epoch173 train_loss=107.4571533203125; test_loss=194.96229553222656
Epoch174 train_loss=64.36561584472656; test_loss=169.5973663330078
Epoch175 train_loss=326.02288818359375; test_loss=167.3031463623047
Epoch176 train_loss=62.164024353027344; test_loss=162.93238830566406
Epoch177 train_loss=153.7225341796875; test_loss=165.28939819335938
Epoch178 train_loss=18.863182067871094; test_loss=164.0782928466797
Epoch179 train_loss=67.84473419189453; test_loss=162.1746063232422
Epoch180 train_loss=309.7794189453125; test_loss=162.46229553222656
Epoch181 train_loss=175.59988403320312; test_loss=160.04373168945312
Epoch182 train_loss=329.873779296875; test_loss=162.45567321777344
Epoch183 train_loss=117.16903686523438; test_loss=162.2509307861328
Epoch184 train_loss=130.17904663085938; test_loss=169.6253204345703
Epoch185 train_loss=169.65122985839844; test_loss=169.35289001464844
Epoch186 train_loss=57.90837860107422; test_loss=164.60914611816406
Epoch187 train_loss=170.043212890625; test_loss=165.26185607910156
Epoch188 train_loss=92.29769897460938; test_loss=170.75563049316406
Epoch189 train_loss=139.1962890625; test_loss=161.74403381347656
Epoch190 train_loss=57.92038345336914; test_loss=161.7471466064453
Epoch191 train_loss=53.40925979614258; test_loss=162.70504760742188
Epoch192 train_loss=172.87538146972656; test_loss=170.8238525390625
Epoch193 train_loss=170.35658264160156; test_loss=162.78546142578125
Epoch194 train_loss=55.55829620361328; test_loss=165.540283203125
Epoch195 train_loss=173.87930297851562; test_loss=164.4542999267578
Epoch196 train_loss=113.68939971923828; test_loss=163.8276824951172
Epoch197 train_loss=111.08441162109375; test_loss=164.6273193359375
Epoch198 train_loss=110.88909912109375; test_loss=161.87100219726562
Epoch199 train_loss=21.32333755493164; test_loss=161.5483856201172
Epoch200 train_loss=173.14186096191406; test_loss=160.88546752929688
Epoch201 train_loss=385.8165283203125; test_loss=159.2997283935547
Epoch202 train_loss=187.82901000976562; test_loss=158.69952392578125
Epoch203 train_loss=202.58456420898438; test_loss=163.65884399414062
Epoch204 train_loss=110.45840454101562; test_loss=161.8212432861328
Epoch205 train_loss=514.4365844726562; test_loss=165.16360473632812
Epoch206 train_loss=155.46641540527344; test_loss=176.30291748046875
Epoch207 train_loss=171.811767578125; test_loss=161.66915893554688
Epoch208 train_loss=78.18744659423828; test_loss=161.070068359375
Epoch209 train_loss=110.79486083984375; test_loss=175.95916748046875
Epoch210 train_loss=94.79419708251953; test_loss=160.6498260498047
Epoch211 train_loss=166.986328125; test_loss=166.230224609375
Epoch212 train_loss=7.7677764892578125; test_loss=165.9629364013672
Epoch213 train_loss=45.71687698364258; test_loss=166.20587158203125
Epoch214 train_loss=120.8321533203125; test_loss=161.35446166992188
Epoch215 train_loss=328.43603515625; test_loss=159.40072631835938
Epoch216 train_loss=51.70781707763672; test_loss=167.2311553955078
Epoch217 train_loss=120.1690444946289; test_loss=167.23333740234375
Epoch218 train_loss=56.25442123413086; test_loss=161.20428466796875
Epoch219 train_loss=236.00389099121094; test_loss=170.61099243164062
Epoch220 train_loss=127.5021743774414; test_loss=164.80068969726562
Epoch221 train_loss=172.87945556640625; test_loss=161.43930053710938
Epoch222 train_loss=32.001747131347656; test_loss=163.80081176757812
Epoch223 train_loss=210.75811767578125; test_loss=164.9922637939453
Epoch224 train_loss=229.01007080078125; test_loss=168.58535766601562
Epoch225 train_loss=223.04232788085938; test_loss=161.0998077392578
Epoch226 train_loss=77.15223693847656; test_loss=165.6102752685547
Epoch227 train_loss=198.9735565185547; test_loss=164.14990234375
Epoch228 train_loss=93.65501403808594; test_loss=163.8566131591797
Epoch229 train_loss=82.32251739501953; test_loss=166.255615234375
Epoch230 train_loss=286.1318054199219; test_loss=161.36260986328125
Epoch231 train_loss=100.73763275146484; test_loss=166.1735382080078
Epoch232 train_loss=63.70570373535156; test_loss=166.10743713378906
Epoch233 train_loss=62.4642333984375; test_loss=162.9131317138672
Epoch234 train_loss=221.40133666992188; test_loss=162.03250122070312
Epoch235 train_loss=40.654884338378906; test_loss=163.08456420898438
Epoch236 train_loss=52.08818817138672; test_loss=164.13955688476562
Epoch237 train_loss=101.07242584228516; test_loss=167.4446563720703
Epoch238 train_loss=66.23622131347656; test_loss=167.60865783691406
Epoch239 train_loss=81.36642456054688; test_loss=166.43577575683594
Epoch240 train_loss=295.1903381347656; test_loss=173.31640625
Epoch241 train_loss=63.882835388183594; test_loss=162.87759399414062
Epoch242 train_loss=127.27268981933594; test_loss=164.14932250976562
Epoch243 train_loss=122.13130950927734; test_loss=173.49667358398438
Epoch244 train_loss=155.09214782714844; test_loss=166.6199493408203
Epoch245 train_loss=106.93472290039062; test_loss=164.6615447998047
Epoch246 train_loss=51.51144790649414; test_loss=158.96954345703125
Epoch247 train_loss=60.47575378417969; test_loss=167.6519317626953
Epoch248 train_loss=196.6871795654297; test_loss=163.5343780517578
Epoch249 train_loss=100.65290832519531; test_loss=163.6652374267578
Epoch250 train_loss=195.0903778076172; test_loss=161.76751708984375
Epoch251 train_loss=84.76937866210938; test_loss=165.68820190429688
Epoch252 train_loss=153.1739044189453; test_loss=165.97091674804688
Epoch253 train_loss=167.47201538085938; test_loss=165.9927215576172
Epoch254 train_loss=142.17918395996094; test_loss=167.3263702392578
Epoch255 train_loss=301.5807189941406; test_loss=164.22923278808594
Epoch256 train_loss=52.18536376953125; test_loss=167.18215942382812
Epoch257 train_loss=162.08514404296875; test_loss=170.9807891845703
Epoch258 train_loss=100.80824279785156; test_loss=168.32997131347656
Epoch259 train_loss=39.069068908691406; test_loss=163.44534301757812
Epoch260 train_loss=160.635009765625; test_loss=171.61749267578125
Epoch261 train_loss=81.1020736694336; test_loss=164.01092529296875
Epoch262 train_loss=44.10032653808594; test_loss=168.71517944335938
Epoch263 train_loss=213.7645721435547; test_loss=172.456298828125
Epoch264 train_loss=548.3701171875; test_loss=163.41151428222656
Epoch265 train_loss=233.59695434570312; test_loss=162.07958984375
Epoch266 train_loss=147.23272705078125; test_loss=162.21481323242188
Epoch267 train_loss=242.95956420898438; test_loss=158.0032958984375
Epoch268 train_loss=56.674007415771484; test_loss=164.37454223632812
Epoch269 train_loss=546.436767578125; test_loss=162.36093139648438
Epoch270 train_loss=279.0970458984375; test_loss=164.40272521972656
Epoch271 train_loss=49.73223876953125; test_loss=159.98248291015625
Epoch272 train_loss=171.9744415283203; test_loss=168.1089630126953
Epoch273 train_loss=216.70603942871094; test_loss=164.89471435546875
Epoch274 train_loss=187.45240783691406; test_loss=161.8623809814453
Epoch275 train_loss=214.95059204101562; test_loss=168.0354461669922
Epoch276 train_loss=41.54658126831055; test_loss=167.55064392089844
Epoch277 train_loss=185.19210815429688; test_loss=158.59231567382812
Epoch278 train_loss=122.70420837402344; test_loss=161.1947021484375
Epoch279 train_loss=353.719482421875; test_loss=171.7982177734375
Epoch280 train_loss=87.18653869628906; test_loss=164.78651428222656
Epoch281 train_loss=72.61697387695312; test_loss=168.23756408691406
Epoch282 train_loss=100.5133056640625; test_loss=160.99916076660156
Epoch283 train_loss=127.70616149902344; test_loss=163.4879150390625
Epoch284 train_loss=99.89840698242188; test_loss=163.97459411621094
Epoch285 train_loss=77.32777404785156; test_loss=163.62745666503906
Epoch286 train_loss=124.25369262695312; test_loss=164.14772033691406
Epoch287 train_loss=174.0499267578125; test_loss=159.297119140625
Epoch288 train_loss=86.14144134521484; test_loss=165.1277618408203
Epoch289 train_loss=94.00627136230469; test_loss=165.8257293701172
Epoch290 train_loss=140.72015380859375; test_loss=174.28659057617188
Epoch291 train_loss=256.7303161621094; test_loss=178.28317260742188
Epoch292 train_loss=258.34515380859375; test_loss=184.3199462890625
Epoch293 train_loss=241.98175048828125; test_loss=163.60321044921875
Epoch294 train_loss=104.48750305175781; test_loss=172.2147979736328
Epoch295 train_loss=141.42974853515625; test_loss=163.1754913330078
Epoch296 train_loss=87.97913360595703; test_loss=163.3909454345703
Epoch297 train_loss=122.6553955078125; test_loss=160.81448364257812
Epoch298 train_loss=171.87966918945312; test_loss=164.93472290039062
Epoch299 train_loss=151.6804656982422; test_loss=164.1793670654297
Epoch300 train_loss=119.67147827148438; test_loss=161.9600067138672
Epoch301 train_loss=39.46073913574219; test_loss=162.4904327392578
Epoch302 train_loss=166.9513397216797; test_loss=162.8325653076172
Epoch303 train_loss=101.21731567382812; test_loss=163.09521484375
Epoch304 train_loss=433.0119323730469; test_loss=161.4409637451172
Epoch305 train_loss=216.48870849609375; test_loss=168.8541259765625
Epoch306 train_loss=220.6114959716797; test_loss=167.20382690429688
Epoch307 train_loss=129.5155029296875; test_loss=166.97145080566406
Epoch308 train_loss=92.77903747558594; test_loss=163.03765869140625
Epoch309 train_loss=80.41839599609375; test_loss=167.42430114746094
Epoch310 train_loss=57.04071044921875; test_loss=164.92051696777344
Epoch311 train_loss=102.43417358398438; test_loss=163.82545471191406
Epoch312 train_loss=333.95367431640625; test_loss=168.83877563476562
Epoch313 train_loss=103.35455322265625; test_loss=164.4847869873047
Epoch314 train_loss=68.85281372070312; test_loss=165.78298950195312
Epoch315 train_loss=80.18937683105469; test_loss=166.1205291748047
Epoch316 train_loss=81.49607849121094; test_loss=164.09349060058594
Epoch317 train_loss=82.20712280273438; test_loss=163.4977569580078
Epoch318 train_loss=102.15370178222656; test_loss=164.87400817871094
Epoch319 train_loss=120.2528076171875; test_loss=174.18418884277344
Epoch320 train_loss=55.1064338684082; test_loss=165.2068328857422
Epoch321 train_loss=74.31202697753906; test_loss=164.17703247070312
Epoch322 train_loss=115.77204895019531; test_loss=169.54763793945312
Epoch323 train_loss=80.14685821533203; test_loss=163.3780059814453
Epoch324 train_loss=105.07001495361328; test_loss=162.5233612060547
Epoch325 train_loss=96.73945617675781; test_loss=164.18960571289062
Epoch326 train_loss=90.47671508789062; test_loss=165.3106231689453
Epoch327 train_loss=23.71331787109375; test_loss=164.08006286621094
Epoch328 train_loss=64.39908599853516; test_loss=162.8795928955078
Epoch329 train_loss=62.39013671875; test_loss=168.87847900390625
Epoch330 train_loss=52.33369445800781; test_loss=164.3182373046875
Epoch331 train_loss=134.77833557128906; test_loss=167.17138671875
Epoch332 train_loss=188.9905242919922; test_loss=165.40260314941406
Epoch333 train_loss=129.34657287597656; test_loss=164.9362335205078
Epoch334 train_loss=81.84445190429688; test_loss=165.2362518310547
Epoch335 train_loss=193.9893798828125; test_loss=168.77340698242188
Epoch336 train_loss=136.70230102539062; test_loss=162.16534423828125
Epoch337 train_loss=36.458316802978516; test_loss=163.038330078125
Epoch338 train_loss=139.96624755859375; test_loss=162.9957733154297
Epoch339 train_loss=108.12320709228516; test_loss=166.03030395507812
Epoch340 train_loss=264.70263671875; test_loss=177.51405334472656
Epoch341 train_loss=228.31503295898438; test_loss=174.66815185546875
Epoch342 train_loss=31.970195770263672; test_loss=165.58358764648438
Epoch343 train_loss=138.36175537109375; test_loss=162.1240997314453
Epoch344 train_loss=171.58889770507812; test_loss=163.42523193359375
Epoch345 train_loss=66.47250366210938; test_loss=163.0779571533203
Epoch346 train_loss=120.99542236328125; test_loss=163.6021270751953
Epoch347 train_loss=117.25601959228516; test_loss=163.1210174560547
Epoch348 train_loss=80.22308349609375; test_loss=166.16043090820312
Epoch349 train_loss=145.9132080078125; test_loss=162.58734130859375
Epoch350 train_loss=25.68461036682129; test_loss=163.03944396972656
Epoch351 train_loss=26.98889923095703; test_loss=177.272216796875
Epoch352 train_loss=104.65174865722656; test_loss=165.29788208007812
Epoch353 train_loss=152.77423095703125; test_loss=163.22299194335938
Epoch354 train_loss=89.83143615722656; test_loss=162.70367431640625
Epoch355 train_loss=121.25886535644531; test_loss=163.30650329589844
Epoch356 train_loss=223.85421752929688; test_loss=169.3383026123047
Epoch357 train_loss=218.73788452148438; test_loss=170.7310791015625
Epoch358 train_loss=46.9085693359375; test_loss=167.7357940673828
Epoch359 train_loss=43.09811019897461; test_loss=166.50889587402344
Epoch360 train_loss=161.9058837890625; test_loss=161.90333557128906
Epoch361 train_loss=141.66732788085938; test_loss=164.67282104492188
Epoch362 train_loss=63.96600341796875; test_loss=162.22811889648438
Epoch363 train_loss=75.364501953125; test_loss=164.12234497070312
Epoch364 train_loss=38.190269470214844; test_loss=164.67784118652344
Epoch365 train_loss=123.04125213623047; test_loss=165.72222900390625
Epoch366 train_loss=129.90231323242188; test_loss=163.57005310058594
Epoch367 train_loss=15.985462188720703; test_loss=164.62545776367188
Epoch368 train_loss=112.01640319824219; test_loss=164.1414337158203
Epoch369 train_loss=108.72543334960938; test_loss=164.21856689453125
Epoch370 train_loss=152.473876953125; test_loss=165.01266479492188
Epoch371 train_loss=399.0870361328125; test_loss=165.78419494628906
Epoch372 train_loss=151.02578735351562; test_loss=178.7980499267578
Epoch373 train_loss=85.1654052734375; test_loss=163.55548095703125
Epoch374 train_loss=58.51276397705078; test_loss=165.58152770996094
Epoch375 train_loss=157.37002563476562; test_loss=164.76602172851562
Epoch376 train_loss=89.26374816894531; test_loss=165.11697387695312
Epoch377 train_loss=256.6785888671875; test_loss=168.22528076171875
Epoch378 train_loss=60.66281509399414; test_loss=165.43328857421875
Epoch379 train_loss=182.2101287841797; test_loss=161.42918395996094
Epoch380 train_loss=63.15855407714844; test_loss=169.8480987548828
Epoch381 train_loss=123.92648315429688; test_loss=162.8980712890625
Epoch382 train_loss=33.396728515625; test_loss=172.17291259765625
Epoch383 train_loss=52.45036315917969; test_loss=162.8729248046875
Epoch384 train_loss=164.460205078125; test_loss=163.7973175048828
Epoch385 train_loss=317.4938659667969; test_loss=170.79061889648438
Epoch386 train_loss=123.92556762695312; test_loss=169.7077178955078
Epoch387 train_loss=265.513671875; test_loss=166.08226013183594
Epoch388 train_loss=97.64744567871094; test_loss=170.1516571044922
Epoch389 train_loss=128.05799865722656; test_loss=163.62620544433594
Epoch390 train_loss=40.48027420043945; test_loss=165.42837524414062
Epoch391 train_loss=162.55850219726562; test_loss=163.4053955078125
Epoch392 train_loss=191.69900512695312; test_loss=162.405029296875
Epoch393 train_loss=376.8638610839844; test_loss=166.6243438720703
Epoch394 train_loss=127.99134826660156; test_loss=161.81918334960938
Epoch395 train_loss=42.33872985839844; test_loss=164.71527099609375
Epoch396 train_loss=57.49638748168945; test_loss=169.88162231445312
Epoch397 train_loss=83.65011596679688; test_loss=161.86907958984375
Epoch398 train_loss=140.87442016601562; test_loss=162.71240234375
Epoch399 train_loss=59.458534240722656; test_loss=161.358154296875
Epoch400 train_loss=184.28785705566406; test_loss=163.4476318359375
Epoch401 train_loss=79.77963256835938; test_loss=163.25759887695312
Epoch402 train_loss=56.223388671875; test_loss=177.87442016601562
Epoch403 train_loss=324.21038818359375; test_loss=165.0907440185547
Epoch404 train_loss=128.2250213623047; test_loss=164.05955505371094
Epoch405 train_loss=207.590087890625; test_loss=161.39833068847656
Epoch406 train_loss=141.56854248046875; test_loss=161.68321228027344
Epoch407 train_loss=84.23471069335938; test_loss=166.86099243164062
Epoch408 train_loss=84.84931945800781; test_loss=170.45376586914062
Epoch409 train_loss=71.83488464355469; test_loss=162.9379119873047
Epoch410 train_loss=39.85479736328125; test_loss=166.47801208496094
Epoch411 train_loss=114.2210693359375; test_loss=163.42039489746094
Epoch412 train_loss=62.49249267578125; test_loss=165.11619567871094
Epoch413 train_loss=291.07293701171875; test_loss=164.2895050048828
Epoch414 train_loss=60.994964599609375; test_loss=167.15281677246094
Epoch415 train_loss=123.19273376464844; test_loss=165.78688049316406
Epoch416 train_loss=106.53362274169922; test_loss=162.1890869140625
Epoch417 train_loss=52.274208068847656; test_loss=170.84478759765625
Epoch418 train_loss=121.20933532714844; test_loss=167.38629150390625
Epoch419 train_loss=53.18609619140625; test_loss=163.24122619628906
Epoch420 train_loss=221.49029541015625; test_loss=161.49671936035156
Epoch421 train_loss=25.114715576171875; test_loss=167.0068359375
Epoch422 train_loss=72.66636657714844; test_loss=163.30555725097656
Epoch423 train_loss=151.15611267089844; test_loss=163.69752502441406
Epoch424 train_loss=164.5638427734375; test_loss=165.1549835205078
Epoch425 train_loss=92.21389770507812; test_loss=166.26295471191406
Epoch426 train_loss=91.83708953857422; test_loss=170.8628692626953
Epoch427 train_loss=86.21302795410156; test_loss=165.74624633789062
Epoch428 train_loss=366.0190734863281; test_loss=165.541259765625
Epoch429 train_loss=140.01596069335938; test_loss=163.97206115722656
Epoch430 train_loss=32.77885437011719; test_loss=168.43458557128906
Epoch431 train_loss=55.3975715637207; test_loss=171.48228454589844
Epoch432 train_loss=74.435546875; test_loss=168.78970336914062
Epoch433 train_loss=77.20055389404297; test_loss=165.91798400878906
Epoch434 train_loss=365.6084899902344; test_loss=165.9298858642578
Epoch435 train_loss=42.32051086425781; test_loss=166.03746032714844
Epoch436 train_loss=75.88514709472656; test_loss=166.46194458007812
Epoch437 train_loss=135.06689453125; test_loss=168.3536376953125
Epoch438 train_loss=207.27127075195312; test_loss=167.2074432373047
Epoch439 train_loss=163.08169555664062; test_loss=176.86155700683594
Epoch440 train_loss=62.139862060546875; test_loss=164.8838348388672
Epoch441 train_loss=86.92817687988281; test_loss=162.818603515625
Epoch442 train_loss=47.59203338623047; test_loss=173.75437927246094
Epoch443 train_loss=122.49647521972656; test_loss=173.41575622558594
Epoch444 train_loss=94.68870544433594; test_loss=173.12490844726562
Epoch445 train_loss=65.6208267211914; test_loss=168.65895080566406
Epoch446 train_loss=103.0054702758789; test_loss=163.06497192382812
Epoch447 train_loss=260.75238037109375; test_loss=163.51388549804688
Epoch448 train_loss=72.63311004638672; test_loss=166.02133178710938
Epoch449 train_loss=163.0888214111328; test_loss=165.17750549316406
Epoch450 train_loss=386.3730163574219; test_loss=164.84689331054688
Epoch451 train_loss=90.84103393554688; test_loss=170.14773559570312
Epoch452 train_loss=157.58447265625; test_loss=172.69102478027344
Epoch453 train_loss=67.26539611816406; test_loss=167.23220825195312
Epoch454 train_loss=72.96452331542969; test_loss=167.33993530273438
Epoch455 train_loss=49.65321350097656; test_loss=164.20867919921875
Epoch456 train_loss=134.58738708496094; test_loss=167.5106658935547
Epoch457 train_loss=83.96897888183594; test_loss=164.26466369628906
Epoch458 train_loss=136.89755249023438; test_loss=166.26718139648438
Epoch459 train_loss=85.42108154296875; test_loss=165.48681640625
Epoch460 train_loss=297.95068359375; test_loss=163.69293212890625
Epoch461 train_loss=290.84674072265625; test_loss=164.19749450683594
Epoch462 train_loss=159.20608520507812; test_loss=167.80384826660156
Epoch463 train_loss=143.5340576171875; test_loss=164.24908447265625
Epoch464 train_loss=131.63392639160156; test_loss=165.00999450683594
Epoch465 train_loss=129.79107666015625; test_loss=174.1075439453125
Epoch466 train_loss=34.397483825683594; test_loss=162.2672882080078
Epoch467 train_loss=57.75324249267578; test_loss=171.3687286376953
Epoch468 train_loss=84.68925476074219; test_loss=163.58197021484375
Epoch469 train_loss=119.72830200195312; test_loss=166.61859130859375
Epoch470 train_loss=267.83648681640625; test_loss=163.2185821533203
Epoch471 train_loss=96.14952087402344; test_loss=167.50311279296875
Epoch472 train_loss=88.28357696533203; test_loss=166.06788635253906
Epoch473 train_loss=540.9403076171875; test_loss=169.79588317871094
Epoch474 train_loss=16.204368591308594; test_loss=162.72332763671875
Epoch475 train_loss=42.81001663208008; test_loss=166.3005828857422
Epoch476 train_loss=69.74586486816406; test_loss=172.54318237304688
Epoch477 train_loss=69.11968994140625; test_loss=169.7013702392578
Epoch478 train_loss=285.158447265625; test_loss=165.4213409423828
Epoch479 train_loss=263.7444152832031; test_loss=164.11752319335938
Epoch480 train_loss=244.7152862548828; test_loss=166.57347106933594
Epoch481 train_loss=101.2398452758789; test_loss=168.95823669433594
Epoch482 train_loss=147.3954315185547; test_loss=168.5584716796875
Epoch483 train_loss=295.50927734375; test_loss=168.02801513671875
Epoch484 train_loss=66.44602966308594; test_loss=168.61178588867188
Epoch485 train_loss=40.504234313964844; test_loss=170.3976593017578
Epoch486 train_loss=66.33895874023438; test_loss=165.0502471923828
Epoch487 train_loss=127.8127212524414; test_loss=167.662353515625
Epoch488 train_loss=89.51637268066406; test_loss=165.26780700683594
Epoch489 train_loss=105.68034362792969; test_loss=162.48928833007812
Epoch490 train_loss=71.39432525634766; test_loss=163.3619384765625
Epoch491 train_loss=69.56584167480469; test_loss=163.1156768798828
Epoch492 train_loss=69.23882293701172; test_loss=167.60494995117188
Epoch493 train_loss=44.513187408447266; test_loss=164.09889221191406
Epoch494 train_loss=204.108642578125; test_loss=164.9648895263672
Epoch495 train_loss=266.49993896484375; test_loss=168.45266723632812
Epoch496 train_loss=81.14156341552734; test_loss=165.2711181640625
Epoch497 train_loss=49.28092956542969; test_loss=163.96092224121094
Epoch498 train_loss=176.3229522705078; test_loss=164.4605255126953
Epoch499 train_loss=168.92193603515625; test_loss=166.7086944580078
Epoch500 train_loss=95.28424072265625; test_loss=164.33131408691406
Epoch501 train_loss=34.05635070800781; test_loss=165.91336059570312
Epoch502 train_loss=405.67279052734375; test_loss=168.96005249023438
Epoch503 train_loss=110.69308471679688; test_loss=162.0184326171875
Epoch504 train_loss=42.481937408447266; test_loss=167.96853637695312
Epoch505 train_loss=101.54833221435547; test_loss=164.05064392089844
Epoch506 train_loss=224.20669555664062; test_loss=167.0159912109375
Epoch507 train_loss=84.7254638671875; test_loss=167.58999633789062
Epoch508 train_loss=143.982177734375; test_loss=164.08384704589844
Epoch509 train_loss=20.28097915649414; test_loss=167.85791015625
Epoch510 train_loss=119.36604309082031; test_loss=163.38636779785156
Epoch511 train_loss=64.3329849243164; test_loss=166.0568084716797
Epoch512 train_loss=122.08056640625; test_loss=168.92294311523438
Epoch513 train_loss=195.2454376220703; test_loss=169.03134155273438
Epoch514 train_loss=207.58889770507812; test_loss=162.870849609375
Epoch515 train_loss=191.19064331054688; test_loss=164.06478881835938
Epoch516 train_loss=97.19236755371094; test_loss=167.2461700439453
Epoch517 train_loss=107.95040130615234; test_loss=167.05316162109375
Epoch518 train_loss=114.37751007080078; test_loss=170.2272491455078
Epoch519 train_loss=77.23358154296875; test_loss=163.94967651367188
Epoch520 train_loss=90.75455474853516; test_loss=162.16806030273438
Epoch521 train_loss=130.82611083984375; test_loss=161.5166015625
Epoch522 train_loss=78.77249145507812; test_loss=170.0148468017578
Epoch523 train_loss=60.30631637573242; test_loss=161.9775390625
Epoch524 train_loss=60.60139465332031; test_loss=165.07522583007812
Epoch525 train_loss=89.48075103759766; test_loss=162.32481384277344
Epoch526 train_loss=67.97567749023438; test_loss=164.46253967285156
Epoch527 train_loss=229.92332458496094; test_loss=168.09136962890625
Epoch528 train_loss=115.09483337402344; test_loss=166.53506469726562
Epoch529 train_loss=43.405181884765625; test_loss=167.86314392089844
Epoch530 train_loss=344.00103759765625; test_loss=165.4000244140625
Epoch531 train_loss=93.05044555664062; test_loss=170.0605010986328
Epoch532 train_loss=53.33757019042969; test_loss=165.28907775878906
Epoch533 train_loss=190.92431640625; test_loss=165.8144073486328
Epoch534 train_loss=198.6485595703125; test_loss=167.41859436035156
Epoch535 train_loss=126.88832092285156; test_loss=171.26930236816406
Epoch536 train_loss=92.66405487060547; test_loss=165.55638122558594
Epoch537 train_loss=91.93603515625; test_loss=163.52427673339844
Epoch538 train_loss=69.3505859375; test_loss=165.2274932861328
Epoch539 train_loss=82.7322998046875; test_loss=168.91481018066406
Epoch540 train_loss=17.50394058227539; test_loss=169.27011108398438
Epoch541 train_loss=32.8570556640625; test_loss=166.45263671875
Epoch542 train_loss=36.82524108886719; test_loss=166.88877868652344
Epoch543 train_loss=391.80072021484375; test_loss=164.95379638671875
Epoch544 train_loss=43.355369567871094; test_loss=168.03074645996094
Epoch545 train_loss=33.06733703613281; test_loss=166.1701202392578
Epoch546 train_loss=101.48392486572266; test_loss=166.2744598388672
Epoch547 train_loss=129.949951171875; test_loss=167.05026245117188
Epoch548 train_loss=248.58932495117188; test_loss=165.97836303710938
Epoch549 train_loss=44.84945297241211; test_loss=165.88087463378906
Epoch550 train_loss=128.03073120117188; test_loss=170.87684631347656
Epoch551 train_loss=233.77760314941406; test_loss=166.42967224121094
Epoch552 train_loss=131.760986328125; test_loss=173.50735473632812
Epoch553 train_loss=167.30616760253906; test_loss=164.40525817871094
Epoch554 train_loss=155.65635681152344; test_loss=168.5552978515625
Epoch555 train_loss=116.49178314208984; test_loss=172.92027282714844
Epoch556 train_loss=94.57073211669922; test_loss=165.2044219970703
Epoch557 train_loss=104.20625305175781; test_loss=165.56793212890625
Epoch558 train_loss=93.02105712890625; test_loss=163.8731231689453
Epoch559 train_loss=156.41380310058594; test_loss=168.1843719482422
Epoch560 train_loss=49.239105224609375; test_loss=169.22463989257812
Epoch561 train_loss=126.20044708251953; test_loss=168.33282470703125
Epoch562 train_loss=58.00389099121094; test_loss=165.0109100341797
Epoch563 train_loss=312.92364501953125; test_loss=169.8401641845703
Epoch564 train_loss=128.5010986328125; test_loss=169.75965881347656
Epoch565 train_loss=165.8216552734375; test_loss=165.683349609375
Epoch566 train_loss=29.403820037841797; test_loss=166.12339782714844
Epoch567 train_loss=156.6363983154297; test_loss=172.17755126953125
Epoch568 train_loss=71.62834167480469; test_loss=168.68692016601562
Epoch569 train_loss=59.500274658203125; test_loss=166.15402221679688
Epoch570 train_loss=102.36231231689453; test_loss=169.21490478515625
Epoch571 train_loss=395.5327453613281; test_loss=165.37318420410156
Epoch572 train_loss=53.749229431152344; test_loss=164.57708740234375
Epoch573 train_loss=270.77886962890625; test_loss=165.68093872070312
Epoch574 train_loss=94.4588623046875; test_loss=167.4528350830078
Epoch575 train_loss=69.81401062011719; test_loss=165.58203125
Epoch576 train_loss=73.91688537597656; test_loss=170.62158203125
Epoch577 train_loss=48.776832580566406; test_loss=165.01217651367188
Epoch578 train_loss=95.9856185913086; test_loss=162.6156005859375
Epoch579 train_loss=48.42100524902344; test_loss=163.92633056640625
Epoch580 train_loss=53.56227111816406; test_loss=166.42697143554688
Epoch581 train_loss=80.28126525878906; test_loss=171.0489044189453
Epoch582 train_loss=142.04403686523438; test_loss=166.44619750976562
Epoch583 train_loss=35.79247283935547; test_loss=166.78070068359375
Epoch584 train_loss=236.12322998046875; test_loss=164.26966857910156
Epoch585 train_loss=181.7921142578125; test_loss=167.2019500732422
Epoch586 train_loss=127.41431427001953; test_loss=167.9160919189453
Epoch587 train_loss=92.23489379882812; test_loss=165.88571166992188
Epoch588 train_loss=29.483074188232422; test_loss=165.81369018554688
Epoch589 train_loss=142.53573608398438; test_loss=169.2302703857422
Epoch590 train_loss=129.3949432373047; test_loss=165.92922973632812
Epoch591 train_loss=136.17367553710938; test_loss=169.4707794189453
Epoch592 train_loss=146.10064697265625; test_loss=168.2310791015625
Epoch593 train_loss=65.1273193359375; test_loss=165.7943572998047
Epoch594 train_loss=94.51667785644531; test_loss=168.342529296875
Epoch595 train_loss=86.16026306152344; test_loss=167.2923126220703
Epoch596 train_loss=196.54721069335938; test_loss=166.55484008789062
Epoch597 train_loss=75.65972900390625; test_loss=167.3731231689453
Epoch598 train_loss=337.947021484375; test_loss=165.19125366210938
Epoch599 train_loss=59.402305603027344; test_loss=170.1634063720703
Epoch600 train_loss=10.623136520385742; test_loss=170.34458923339844
Epoch601 train_loss=113.19829559326172; test_loss=172.5079803466797
Epoch602 train_loss=90.40863037109375; test_loss=167.19509887695312
Epoch603 train_loss=26.998886108398438; test_loss=168.52989196777344
Epoch604 train_loss=55.71836853027344; test_loss=166.63941955566406
Epoch605 train_loss=205.7966766357422; test_loss=163.9673614501953
Epoch606 train_loss=185.01187133789062; test_loss=168.68240356445312
Epoch607 train_loss=62.11518859863281; test_loss=164.05615234375
Epoch608 train_loss=87.68313598632812; test_loss=164.38844299316406
Epoch609 train_loss=176.16700744628906; test_loss=163.29840087890625
Epoch610 train_loss=55.04998779296875; test_loss=168.99867248535156
Epoch611 train_loss=50.86650085449219; test_loss=170.56793212890625
Epoch612 train_loss=104.70946502685547; test_loss=169.01449584960938
Epoch613 train_loss=94.28950500488281; test_loss=169.764404296875
Epoch614 train_loss=62.08025360107422; test_loss=171.4986572265625
Epoch615 train_loss=33.657981872558594; test_loss=166.27972412109375
Epoch616 train_loss=308.68914794921875; test_loss=190.23666381835938
Epoch617 train_loss=35.58610153198242; test_loss=171.12051391601562
Epoch618 train_loss=213.19485473632812; test_loss=164.70387268066406
Epoch619 train_loss=117.20097351074219; test_loss=167.6554412841797
Epoch620 train_loss=52.177005767822266; test_loss=165.86854553222656
Epoch621 train_loss=36.01973342895508; test_loss=164.34222412109375
Epoch622 train_loss=65.46503448486328; test_loss=166.09234619140625
Epoch623 train_loss=125.60678100585938; test_loss=163.53631591796875
Epoch624 train_loss=67.3740234375; test_loss=164.6291961669922
Epoch625 train_loss=198.23104858398438; test_loss=166.362060546875
Epoch626 train_loss=60.90520095825195; test_loss=165.43431091308594
Epoch627 train_loss=170.79852294921875; test_loss=171.85446166992188
Epoch628 train_loss=74.87928771972656; test_loss=167.5084228515625
Epoch629 train_loss=43.82542037963867; test_loss=171.02206420898438
Epoch630 train_loss=82.28436279296875; test_loss=168.82022094726562
Epoch631 train_loss=84.94547271728516; test_loss=166.31475830078125
Epoch632 train_loss=391.19537353515625; test_loss=163.46409606933594
Epoch633 train_loss=151.17056274414062; test_loss=190.23233032226562
Epoch634 train_loss=170.51821899414062; test_loss=167.92022705078125
Epoch635 train_loss=69.88438415527344; test_loss=170.7629852294922
Epoch636 train_loss=180.15066528320312; test_loss=172.89398193359375
Epoch637 train_loss=53.82042694091797; test_loss=169.4843292236328
Epoch638 train_loss=69.32736206054688; test_loss=167.8043975830078
Epoch639 train_loss=32.31800842285156; test_loss=181.92544555664062
Epoch640 train_loss=34.70600509643555; test_loss=167.38937377929688
Epoch641 train_loss=33.283447265625; test_loss=167.2130889892578
Epoch642 train_loss=143.530517578125; test_loss=169.201416015625
Epoch643 train_loss=302.66900634765625; test_loss=170.5723419189453
Epoch644 train_loss=95.189697265625; test_loss=170.98086547851562
Epoch645 train_loss=160.19943237304688; test_loss=169.07879638671875
Epoch646 train_loss=143.93582153320312; test_loss=169.6660614013672
Epoch647 train_loss=236.09033203125; test_loss=167.34764099121094
Epoch648 train_loss=16.6624698638916; test_loss=168.60435485839844
Epoch649 train_loss=40.58976745605469; test_loss=168.2864990234375
Epoch650 train_loss=99.77082824707031; test_loss=167.81849670410156
Epoch651 train_loss=109.22715759277344; test_loss=172.51123046875
Epoch652 train_loss=54.108917236328125; test_loss=173.1648406982422
Epoch653 train_loss=82.64188385009766; test_loss=166.01373291015625
Epoch654 train_loss=131.90753173828125; test_loss=170.55264282226562
Epoch655 train_loss=109.25121307373047; test_loss=167.698974609375
Epoch656 train_loss=73.24310302734375; test_loss=173.30174255371094
Epoch657 train_loss=214.9285888671875; test_loss=167.8134765625
Epoch658 train_loss=246.68312072753906; test_loss=165.9333953857422
Epoch659 train_loss=87.27334594726562; test_loss=167.41700744628906
Epoch660 train_loss=97.10816955566406; test_loss=168.66973876953125
Epoch661 train_loss=88.74313354492188; test_loss=168.78819274902344
Epoch662 train_loss=221.15399169921875; test_loss=168.027099609375
Epoch663 train_loss=143.18048095703125; test_loss=172.56088256835938
Epoch664 train_loss=107.04071044921875; test_loss=170.16009521484375
Epoch665 train_loss=67.287109375; test_loss=166.40252685546875
Epoch666 train_loss=216.54727172851562; test_loss=165.79034423828125
Epoch667 train_loss=52.57714080810547; test_loss=173.1593475341797
Epoch668 train_loss=62.870086669921875; test_loss=165.833984375
Epoch669 train_loss=164.6327667236328; test_loss=164.9351348876953
Epoch670 train_loss=164.36935424804688; test_loss=174.03762817382812
Epoch671 train_loss=66.52916717529297; test_loss=167.062255859375
Epoch672 train_loss=91.59703826904297; test_loss=172.90818786621094
Epoch673 train_loss=174.79505920410156; test_loss=166.90277099609375
Epoch674 train_loss=137.0461883544922; test_loss=167.93426513671875
Epoch675 train_loss=55.546791076660156; test_loss=169.51536560058594
Epoch676 train_loss=84.74467468261719; test_loss=170.26593017578125
Epoch677 train_loss=90.9510269165039; test_loss=166.75592041015625
Epoch678 train_loss=10.91368579864502; test_loss=166.8138885498047
Epoch679 train_loss=67.6202621459961; test_loss=165.7217559814453
Epoch680 train_loss=74.11346435546875; test_loss=171.71583557128906
Epoch681 train_loss=58.07868194580078; test_loss=165.38758850097656
Epoch682 train_loss=67.38782501220703; test_loss=166.13868713378906
Epoch683 train_loss=112.36029052734375; test_loss=173.57064819335938
Epoch684 train_loss=235.129638671875; test_loss=166.71249389648438
Epoch685 train_loss=249.37246704101562; test_loss=163.11587524414062
Epoch686 train_loss=77.95226287841797; test_loss=166.8090057373047
Epoch687 train_loss=149.43203735351562; test_loss=174.6463165283203
Epoch688 train_loss=90.08894348144531; test_loss=164.058349609375
Epoch689 train_loss=34.812095642089844; test_loss=163.04444885253906
Epoch690 train_loss=69.78691101074219; test_loss=175.41458129882812
Epoch691 train_loss=401.3391418457031; test_loss=166.80166625976562
Epoch692 train_loss=122.81880187988281; test_loss=167.31109619140625
Epoch693 train_loss=237.20086669921875; test_loss=167.63771057128906
Epoch694 train_loss=78.309814453125; test_loss=167.51600646972656
Epoch695 train_loss=213.96658325195312; test_loss=169.45213317871094
Epoch696 train_loss=96.77980041503906; test_loss=166.56411743164062
Epoch697 train_loss=26.789230346679688; test_loss=166.04127502441406
Epoch698 train_loss=91.49627685546875; test_loss=166.0214385986328
Epoch699 train_loss=58.784515380859375; test_loss=169.00717163085938
Epoch700 train_loss=44.786041259765625; test_loss=167.14376831054688
Epoch701 train_loss=147.39944458007812; test_loss=169.30604553222656
Epoch702 train_loss=82.4192886352539; test_loss=166.73629760742188
Epoch703 train_loss=86.22586059570312; test_loss=169.2486114501953
Epoch704 train_loss=117.09803009033203; test_loss=171.98020935058594
Epoch705 train_loss=23.797216415405273; test_loss=169.9366912841797
Epoch706 train_loss=67.09304809570312; test_loss=167.68809509277344
Epoch707 train_loss=123.0377197265625; test_loss=167.69131469726562
Epoch708 train_loss=54.284996032714844; test_loss=166.93531799316406
Epoch709 train_loss=90.19387817382812; test_loss=167.88931274414062
Epoch710 train_loss=49.26546859741211; test_loss=172.5767364501953
Epoch711 train_loss=74.89558410644531; test_loss=167.84217834472656
Epoch712 train_loss=117.54168701171875; test_loss=170.24427795410156
Epoch713 train_loss=125.11041259765625; test_loss=169.46939086914062
Epoch714 train_loss=104.11398315429688; test_loss=164.67251586914062
Epoch715 train_loss=63.922264099121094; test_loss=175.02989196777344
Epoch716 train_loss=164.82809448242188; test_loss=167.6893310546875
Epoch717 train_loss=113.10639953613281; test_loss=174.50328063964844
Epoch718 train_loss=192.36404418945312; test_loss=169.55528259277344
Epoch719 train_loss=88.72669982910156; test_loss=168.9687957763672
Epoch720 train_loss=110.76195526123047; test_loss=165.68463134765625
Epoch721 train_loss=403.9303283691406; test_loss=166.8405303955078
Epoch722 train_loss=36.72468566894531; test_loss=168.94154357910156
Epoch723 train_loss=105.78306579589844; test_loss=168.87857055664062
Epoch724 train_loss=59.757781982421875; test_loss=165.70372009277344
Epoch725 train_loss=37.96759033203125; test_loss=168.6793670654297
Epoch726 train_loss=322.03955078125; test_loss=166.5427703857422
Epoch727 train_loss=54.14963912963867; test_loss=174.84857177734375
Epoch728 train_loss=35.94380187988281; test_loss=173.2272186279297
Epoch729 train_loss=266.6116027832031; test_loss=165.49624633789062
Epoch730 train_loss=39.591224670410156; test_loss=170.41441345214844
Epoch731 train_loss=97.89007568359375; test_loss=169.48097229003906
Epoch732 train_loss=48.69477844238281; test_loss=166.23899841308594
Epoch733 train_loss=56.79484558105469; test_loss=170.70530700683594
Epoch734 train_loss=123.71546936035156; test_loss=170.65890502929688
Epoch735 train_loss=224.00750732421875; test_loss=167.23548889160156
Epoch736 train_loss=78.82071685791016; test_loss=168.42457580566406
Epoch737 train_loss=55.910457611083984; test_loss=168.7245635986328
Epoch738 train_loss=129.4187469482422; test_loss=165.52755737304688
Epoch739 train_loss=28.02483367919922; test_loss=167.8732452392578
Epoch740 train_loss=60.149993896484375; test_loss=169.93508911132812
Epoch741 train_loss=120.28292083740234; test_loss=171.7563018798828
Epoch742 train_loss=128.394775390625; test_loss=170.17364501953125
Epoch743 train_loss=35.9050178527832; test_loss=165.70016479492188
Epoch744 train_loss=121.75592803955078; test_loss=172.2567138671875
Epoch745 train_loss=170.77882385253906; test_loss=170.3902587890625
Epoch746 train_loss=76.95127868652344; test_loss=163.01223754882812
Epoch747 train_loss=133.13198852539062; test_loss=173.28460693359375
Epoch748 train_loss=314.95556640625; test_loss=174.68516540527344
Epoch749 train_loss=72.5502700805664; test_loss=173.27120971679688
Epoch750 train_loss=162.0077667236328; test_loss=165.4742889404297
Epoch751 train_loss=135.53518676757812; test_loss=168.32691955566406
Epoch752 train_loss=123.423583984375; test_loss=172.55418395996094
Epoch753 train_loss=61.590538024902344; test_loss=169.15484619140625
Epoch754 train_loss=167.74652099609375; test_loss=168.3944854736328
Epoch755 train_loss=142.6802215576172; test_loss=173.6728057861328
Epoch756 train_loss=290.93048095703125; test_loss=168.08615112304688
Epoch757 train_loss=104.5121841430664; test_loss=168.12464904785156
Epoch758 train_loss=104.55673217773438; test_loss=166.83154296875
Epoch759 train_loss=119.30996704101562; test_loss=166.85665893554688
Epoch760 train_loss=84.6243896484375; test_loss=170.36471557617188
Epoch761 train_loss=55.586639404296875; test_loss=166.35076904296875
Epoch762 train_loss=483.8451843261719; test_loss=166.4187469482422
Epoch763 train_loss=43.92466354370117; test_loss=170.601318359375
Epoch764 train_loss=60.52581024169922; test_loss=173.31393432617188
Epoch765 train_loss=48.31085205078125; test_loss=172.44326782226562
Epoch766 train_loss=171.24911499023438; test_loss=172.57565307617188
Epoch767 train_loss=108.08016204833984; test_loss=169.0026092529297
Epoch768 train_loss=40.12031555175781; test_loss=165.9505615234375
Epoch769 train_loss=43.558326721191406; test_loss=174.03587341308594
Epoch770 train_loss=26.62981414794922; test_loss=168.43350219726562
Epoch771 train_loss=20.61859893798828; test_loss=169.14488220214844
Epoch772 train_loss=71.327392578125; test_loss=165.1102294921875
Epoch773 train_loss=72.00009155273438; test_loss=170.3782196044922
Epoch774 train_loss=111.24693298339844; test_loss=165.76925659179688
Epoch775 train_loss=239.60333251953125; test_loss=181.99407958984375
Epoch776 train_loss=64.28157043457031; test_loss=167.71730041503906
Epoch777 train_loss=359.2663879394531; test_loss=162.85467529296875
Epoch778 train_loss=137.51962280273438; test_loss=170.57864379882812
Epoch779 train_loss=35.26966857910156; test_loss=168.10130310058594
Epoch780 train_loss=104.5559310913086; test_loss=167.948974609375
Epoch781 train_loss=64.93766784667969; test_loss=162.7589874267578
Epoch782 train_loss=103.59529113769531; test_loss=164.86582946777344
Epoch783 train_loss=76.68795013427734; test_loss=168.89060974121094
Epoch784 train_loss=94.71891784667969; test_loss=168.00729370117188
Epoch785 train_loss=109.54661560058594; test_loss=170.9490509033203
Epoch786 train_loss=91.2100830078125; test_loss=169.4361114501953
Epoch787 train_loss=91.87706756591797; test_loss=166.51934814453125
Epoch788 train_loss=18.36040496826172; test_loss=165.7460174560547
Epoch789 train_loss=67.43148803710938; test_loss=167.3008575439453
Epoch790 train_loss=189.58883666992188; test_loss=173.3934783935547
Epoch791 train_loss=232.08303833007812; test_loss=169.81504821777344
Epoch792 train_loss=52.454566955566406; test_loss=163.77842712402344
Epoch793 train_loss=20.48473358154297; test_loss=166.23785400390625
Epoch794 train_loss=46.323944091796875; test_loss=165.52830505371094
Epoch795 train_loss=73.0819091796875; test_loss=167.67471313476562
Epoch796 train_loss=182.46490478515625; test_loss=168.77163696289062
Epoch797 train_loss=128.88845825195312; test_loss=166.79962158203125
Epoch798 train_loss=95.67671966552734; test_loss=169.43128967285156
Epoch799 train_loss=29.65179443359375; test_loss=179.77398681640625
Epoch800 train_loss=223.25572204589844; test_loss=169.42242431640625
Epoch801 train_loss=166.7602996826172; test_loss=169.31407165527344
Epoch802 train_loss=46.91896057128906; test_loss=172.10166931152344
Epoch803 train_loss=50.514434814453125; test_loss=172.9869842529297
Epoch804 train_loss=311.4479675292969; test_loss=168.2081298828125
Epoch805 train_loss=31.454345703125; test_loss=172.5666961669922
Epoch806 train_loss=93.40036010742188; test_loss=164.99696350097656
Epoch807 train_loss=79.3328857421875; test_loss=169.4049072265625
Epoch808 train_loss=71.3060531616211; test_loss=165.4198455810547
Epoch809 train_loss=7.7511749267578125; test_loss=168.3428192138672
Epoch810 train_loss=30.951812744140625; test_loss=167.25205993652344
Epoch811 train_loss=162.31719970703125; test_loss=170.47323608398438
Epoch812 train_loss=99.34928894042969; test_loss=170.62918090820312
Epoch813 train_loss=20.40808868408203; test_loss=168.3108367919922
Epoch814 train_loss=25.73324966430664; test_loss=168.18011474609375
Epoch815 train_loss=56.56983184814453; test_loss=171.2964630126953
Epoch816 train_loss=125.90185546875; test_loss=164.79684448242188
Epoch817 train_loss=85.44212341308594; test_loss=167.08184814453125
Epoch818 train_loss=67.00067901611328; test_loss=167.63475036621094
Epoch819 train_loss=77.93020629882812; test_loss=174.48391723632812
Epoch820 train_loss=353.16351318359375; test_loss=176.47537231445312
Epoch821 train_loss=77.70913696289062; test_loss=168.2839813232422
Epoch822 train_loss=75.22406005859375; test_loss=164.88133239746094
Epoch823 train_loss=33.948699951171875; test_loss=166.7749481201172
Epoch824 train_loss=237.96249389648438; test_loss=165.21926879882812
Epoch825 train_loss=104.92910766601562; test_loss=169.5131072998047
Epoch826 train_loss=123.38798522949219; test_loss=169.21914672851562
Epoch827 train_loss=31.44355010986328; test_loss=172.42286682128906
Epoch828 train_loss=48.15142822265625; test_loss=166.3983154296875
Epoch829 train_loss=74.72396087646484; test_loss=168.4449462890625
Epoch830 train_loss=387.3529357910156; test_loss=168.62127685546875
Epoch831 train_loss=49.125885009765625; test_loss=173.16412353515625
Epoch832 train_loss=142.3861846923828; test_loss=171.52806091308594
Epoch833 train_loss=71.55028533935547; test_loss=169.51272583007812
Epoch834 train_loss=98.97758483886719; test_loss=167.91848754882812
Epoch835 train_loss=98.25405883789062; test_loss=166.92019653320312
Epoch836 train_loss=36.73249816894531; test_loss=166.45945739746094
Epoch837 train_loss=141.39022827148438; test_loss=169.29861450195312
Epoch838 train_loss=76.98616790771484; test_loss=167.37698364257812
Epoch839 train_loss=114.90518188476562; test_loss=165.45545959472656
Epoch840 train_loss=100.78521728515625; test_loss=168.91494750976562
Epoch841 train_loss=39.42039489746094; test_loss=167.26856994628906
Epoch842 train_loss=30.835323333740234; test_loss=167.44178771972656
Epoch843 train_loss=159.8651123046875; test_loss=169.12705993652344
Epoch844 train_loss=169.308349609375; test_loss=167.91810607910156
Epoch845 train_loss=65.1744613647461; test_loss=169.78555297851562
Epoch846 train_loss=84.18283081054688; test_loss=165.8385772705078
Epoch847 train_loss=168.99087524414062; test_loss=168.23683166503906
Epoch848 train_loss=135.46044921875; test_loss=165.94833374023438
Epoch849 train_loss=103.36283111572266; test_loss=170.65203857421875
Epoch850 train_loss=121.5856704711914; test_loss=168.373046875
Epoch851 train_loss=103.49989318847656; test_loss=165.92303466796875
Epoch852 train_loss=168.60792541503906; test_loss=167.42030334472656
Epoch853 train_loss=30.871116638183594; test_loss=170.21670532226562
Epoch854 train_loss=65.70550537109375; test_loss=168.3366241455078
Epoch855 train_loss=100.55624389648438; test_loss=168.57383728027344
Epoch856 train_loss=90.8976058959961; test_loss=168.7120819091797
Epoch857 train_loss=192.87042236328125; test_loss=171.57008361816406
Epoch858 train_loss=24.69416046142578; test_loss=168.8271942138672
Epoch859 train_loss=100.58666229248047; test_loss=167.4458770751953
Epoch860 train_loss=156.64437866210938; test_loss=167.7859344482422
Epoch861 train_loss=56.12560272216797; test_loss=167.97760009765625
Epoch862 train_loss=140.7604522705078; test_loss=168.99539184570312
Epoch863 train_loss=187.61727905273438; test_loss=172.42919921875
Epoch864 train_loss=127.39584350585938; test_loss=169.73841857910156
Epoch865 train_loss=95.57141876220703; test_loss=171.2462921142578
Epoch866 train_loss=96.5755615234375; test_loss=167.51651000976562
Epoch867 train_loss=144.6802978515625; test_loss=168.10264587402344
Epoch868 train_loss=113.75733947753906; test_loss=165.18991088867188
Epoch869 train_loss=257.29388427734375; test_loss=168.50759887695312
Epoch870 train_loss=30.690475463867188; test_loss=179.34669494628906
Epoch871 train_loss=91.7518081665039; test_loss=169.1973419189453
Epoch872 train_loss=128.02462768554688; test_loss=167.02615356445312
Epoch873 train_loss=38.94298553466797; test_loss=171.53684997558594
Epoch874 train_loss=242.05697631835938; test_loss=164.97645568847656
Epoch875 train_loss=99.22091674804688; test_loss=168.88743591308594
Epoch876 train_loss=54.16387939453125; test_loss=166.47994995117188
Epoch877 train_loss=33.59615707397461; test_loss=170.6746368408203
Epoch878 train_loss=58.84706115722656; test_loss=168.9330291748047
Epoch879 train_loss=106.20643615722656; test_loss=169.64810180664062
Epoch880 train_loss=36.644744873046875; test_loss=169.440673828125
Epoch881 train_loss=84.0419921875; test_loss=169.41497802734375
Epoch882 train_loss=88.68582916259766; test_loss=166.87608337402344
Epoch883 train_loss=62.67417907714844; test_loss=168.53529357910156
Epoch884 train_loss=157.82476806640625; test_loss=168.977294921875
Epoch885 train_loss=152.41311645507812; test_loss=168.4359588623047
Epoch886 train_loss=125.12825012207031; test_loss=168.590087890625
Epoch887 train_loss=52.57504653930664; test_loss=174.480712890625
Epoch888 train_loss=22.512069702148438; test_loss=169.2889404296875
Epoch889 train_loss=169.80055236816406; test_loss=168.6106719970703
Epoch890 train_loss=394.9848937988281; test_loss=170.36172485351562
Epoch891 train_loss=64.33844757080078; test_loss=167.15992736816406
Epoch892 train_loss=122.86173248291016; test_loss=166.12940979003906
Epoch893 train_loss=63.223838806152344; test_loss=171.2291717529297
Epoch894 train_loss=343.12652587890625; test_loss=170.27749633789062
Epoch895 train_loss=53.470664978027344; test_loss=166.82083129882812
Epoch896 train_loss=63.439517974853516; test_loss=165.2768096923828
Epoch897 train_loss=98.80972290039062; test_loss=168.3187255859375
Epoch898 train_loss=323.36395263671875; test_loss=170.9481964111328
Epoch899 train_loss=80.02630615234375; test_loss=168.14012145996094
Epoch900 train_loss=70.26171875; test_loss=169.5446319580078
Epoch901 train_loss=36.998809814453125; test_loss=169.7222900390625
Epoch902 train_loss=138.04647827148438; test_loss=169.45286560058594
Epoch903 train_loss=234.19363403320312; test_loss=168.9634246826172
Epoch904 train_loss=82.1316146850586; test_loss=169.286865234375
Epoch905 train_loss=131.93661499023438; test_loss=169.21543884277344
Epoch906 train_loss=80.744140625; test_loss=173.24322509765625
Epoch907 train_loss=130.11044311523438; test_loss=172.87530517578125
Epoch908 train_loss=48.20553970336914; test_loss=169.89698791503906
Epoch909 train_loss=127.49729919433594; test_loss=168.22386169433594
Epoch910 train_loss=51.298763275146484; test_loss=167.1525115966797
Epoch911 train_loss=45.36656951904297; test_loss=165.89012145996094
Epoch912 train_loss=60.09135055541992; test_loss=169.29200744628906
Epoch913 train_loss=22.564659118652344; test_loss=165.95445251464844
Epoch914 train_loss=44.288665771484375; test_loss=171.13394165039062
Epoch915 train_loss=185.70553588867188; test_loss=172.24942016601562
Epoch916 train_loss=116.42236328125; test_loss=166.2777557373047
Epoch917 train_loss=118.93341827392578; test_loss=170.5911102294922
Epoch918 train_loss=218.27847290039062; test_loss=168.57632446289062
Epoch919 train_loss=79.55811309814453; test_loss=175.5357666015625
Epoch920 train_loss=220.16305541992188; test_loss=167.34588623046875
Epoch921 train_loss=199.70484924316406; test_loss=170.62887573242188
Epoch922 train_loss=18.60470962524414; test_loss=169.82566833496094
Epoch923 train_loss=108.85424041748047; test_loss=167.8758544921875
Epoch924 train_loss=105.07585144042969; test_loss=168.80093383789062
Epoch925 train_loss=193.8077850341797; test_loss=172.9694366455078
Epoch926 train_loss=156.46946716308594; test_loss=168.56353759765625
Epoch927 train_loss=56.01422882080078; test_loss=167.63055419921875
Epoch928 train_loss=131.08065795898438; test_loss=166.6891632080078
Epoch929 train_loss=117.38299560546875; test_loss=172.21875
Epoch930 train_loss=98.91456604003906; test_loss=174.14260864257812
Epoch931 train_loss=122.4019775390625; test_loss=174.446044921875
Epoch932 train_loss=29.10507583618164; test_loss=167.2700958251953
Epoch933 train_loss=151.3721160888672; test_loss=172.88743591308594
Epoch934 train_loss=23.9586181640625; test_loss=173.69442749023438
Epoch935 train_loss=314.6046142578125; test_loss=167.77264404296875
Epoch936 train_loss=53.0965690612793; test_loss=172.22164916992188
Epoch937 train_loss=106.66029357910156; test_loss=169.9083251953125
Epoch938 train_loss=104.02741241455078; test_loss=170.58151245117188
Epoch939 train_loss=150.327880859375; test_loss=165.52590942382812
Epoch940 train_loss=37.09879684448242; test_loss=169.6523895263672
Epoch941 train_loss=63.401451110839844; test_loss=174.0702362060547
Epoch942 train_loss=120.1020278930664; test_loss=167.3870391845703
Epoch943 train_loss=97.84154510498047; test_loss=171.4139404296875
Epoch944 train_loss=97.36027526855469; test_loss=171.27842712402344
Epoch945 train_loss=52.924659729003906; test_loss=168.97039794921875
Epoch946 train_loss=236.56561279296875; test_loss=169.3704833984375
Epoch947 train_loss=86.25883483886719; test_loss=173.53553771972656
Epoch948 train_loss=108.19207763671875; test_loss=170.86097717285156
Epoch949 train_loss=38.288978576660156; test_loss=167.85247802734375
Epoch950 train_loss=19.863731384277344; test_loss=170.11917114257812
Epoch951 train_loss=119.1142578125; test_loss=172.3660888671875
Epoch952 train_loss=89.74546813964844; test_loss=172.21542358398438
Epoch953 train_loss=141.7389373779297; test_loss=168.25711059570312
Epoch954 train_loss=39.667884826660156; test_loss=170.2116241455078
Epoch955 train_loss=431.675048828125; test_loss=170.27479553222656
Epoch956 train_loss=132.14915466308594; test_loss=169.76060485839844
Epoch957 train_loss=93.89959716796875; test_loss=168.15203857421875
Epoch958 train_loss=83.72544860839844; test_loss=170.63136291503906
Epoch959 train_loss=43.17474365234375; test_loss=178.4591827392578
Epoch960 train_loss=56.52799987792969; test_loss=166.8655548095703
Epoch961 train_loss=56.49891662597656; test_loss=168.03237915039062
Epoch962 train_loss=73.75381469726562; test_loss=168.7765350341797
Epoch963 train_loss=121.59088134765625; test_loss=167.89877319335938
Epoch964 train_loss=104.70342254638672; test_loss=166.686279296875
Epoch965 train_loss=15.417573928833008; test_loss=166.45066833496094
Epoch966 train_loss=77.81199645996094; test_loss=167.9193878173828
Epoch967 train_loss=32.98353576660156; test_loss=172.26622009277344
Epoch968 train_loss=27.415000915527344; test_loss=171.1028289794922
Epoch969 train_loss=112.43975067138672; test_loss=167.66696166992188
Epoch970 train_loss=48.006500244140625; test_loss=181.4803924560547
Epoch971 train_loss=110.02949523925781; test_loss=169.1517791748047
Epoch972 train_loss=83.07830810546875; test_loss=172.0324249267578
Epoch973 train_loss=73.52944946289062; test_loss=172.34410095214844
Epoch974 train_loss=74.334228515625; test_loss=172.7642822265625
Epoch975 train_loss=54.615455627441406; test_loss=170.19447326660156
Epoch976 train_loss=64.89441680908203; test_loss=168.75643920898438
Epoch977 train_loss=71.8236083984375; test_loss=166.48097229003906
Epoch978 train_loss=154.17327880859375; test_loss=164.59085083007812
Epoch979 train_loss=125.88191223144531; test_loss=169.101806640625
Epoch980 train_loss=58.488983154296875; test_loss=173.7602081298828
Epoch981 train_loss=38.74562072753906; test_loss=168.86814880371094
Epoch982 train_loss=108.42764282226562; test_loss=167.58131408691406
Epoch983 train_loss=33.930885314941406; test_loss=170.35157775878906
Epoch984 train_loss=106.59173583984375; test_loss=169.548095703125
Epoch985 train_loss=149.39395141601562; test_loss=170.8510284423828
Epoch986 train_loss=130.86029052734375; test_loss=167.88394165039062
Epoch987 train_loss=60.84172058105469; test_loss=170.97589111328125
Epoch988 train_loss=81.83094787597656; test_loss=173.30421447753906
Epoch989 train_loss=65.79107666015625; test_loss=173.23104858398438
Epoch990 train_loss=88.69274139404297; test_loss=172.0693817138672
Epoch991 train_loss=67.49601745605469; test_loss=167.8128204345703
Epoch992 train_loss=27.547714233398438; test_loss=169.27748107910156
Epoch993 train_loss=25.2454833984375; test_loss=174.8845672607422
Epoch994 train_loss=75.30319213867188; test_loss=172.2002716064453
Epoch995 train_loss=16.706966400146484; test_loss=168.24111938476562
Epoch996 train_loss=43.22922134399414; test_loss=177.97006225585938
Epoch997 train_loss=31.173324584960938; test_loss=170.94671630859375
Epoch998 train_loss=50.866817474365234; test_loss=174.6862335205078
Epoch999 train_loss=116.99969482421875; test_loss=166.95755004882812
Epoch1000 train_loss=62.47306823730469; test_loss=169.91616821289062
------------------------------------------- TRAINING SCORES -------------------------------------------
Overall MAE: 127.83843072414398 +/- 90.65328560853274
Overall RMSE: 171.06993522262573 +/- 137.64985868220109
Overall MAPE: 16.579783494234086 +/- 20.826968398715486
Overall R2: 69.75038325190545 +/- 105.27761032504858
------------------------------------------- TESTING SCORES -------------------------------------------
Overall MAE: 168.9494116973877 +/- 8.022559538075681
Overall RMSE: 277.84417306518554 +/- 9.953104843194573
Overall MAPE: 22.436886066436767 +/- 1.2793881256805162
Overall R2: 83.87500293254853 +/- 1.227900348513482
Wall time: 15min 11s
%%time
# Now we do 10-Fold CV on our CNN model with 45 Scaled vars:
# here are the best parameters for the 45 Scaled vars CNN hyperparameter search:
# Best Study Parameters:
# batch_size: 16
# n_hdn_layers: 4
# neurons_HL1: 714
# out_channel: 128
# kernel_size: 5
# conv_activation: linear
# dropout_prob: 0.1
# mx_pl_size: 2
# mx_pl_strides: 3
# HL0_ac_fn: relu
# HL1_ac_fn: linear
# HL2_ac_fn: relu
# HL3_ac_fn: linear
# Sequential(
# (0): Unflatten(dim=1, unflattened_size=(1, 45))
# (1): Conv1d(1, 128, kernel_size=(5,), stride=(1,))
# (2): Dropout(p=0.1, inplace=False)
# (3): MaxPool1d(kernel_size=2, stride=3, padding=0, dilation=1, ceil_mode=False)
# (4): Flatten(start_dim=1, end_dim=-1)
# (5): Linear(in_features=1792, out_features=714, bias=True)
# (6): ReLU()
# (7): Linear(in_features=714, out_features=357, bias=True)
# (8): Linear(in_features=357, out_features=178, bias=True)
# (9): ReLU()
# (10): Linear(in_features=178, out_features=89, bias=True)
# (11): Linear(in_features=89, out_features=1, bias=True)
# )
metrics = [RootMeanSquaredError(), "mean_absolute_percentage_error",
"mean_absolute_error"]
# we define the cross validator, and other variables:
batch_size = 16
num_epochs = 100
num_folds = 10
kfold = KFold(n_splits=num_folds, shuffle=True, random_state=42)
fold_num = 1
MAPE_train = []
MAPE_scores = []
RMSE_train = []
RMSE_scores = []
MAE_train = []
MAE_scores = []
R2_train = []
R2_scores = []
for train, test in kfold.split(X_scaled, Y):
# now our data is ready to go into our model.
# we have to set maxpool1d padding to 'valid' (same as 0)
model = Sequential([
layers.Conv1D(128, 5, activation='linear', input_shape=(X_scaled.shape[1], 1)),
layers.Dropout(0.1),
layers.MaxPooling1D(pool_size=2, strides=3, padding='valid'),
layers.Flatten(),
layers.Dense(714, activation='relu'),
layers.Dense(357, activation='linear'),
layers.Dense(178, activation='relu'),
layers.Dense(89, activation='linear'),
layers.Dense(1, activation='linear')
])
model.compile(loss='mae', optimizer=Adam(), metrics=metrics)
history = model.fit(X_scaled[train], Y[train], batch_size=batch_size, epochs=num_epochs, verbose=False)
scores = model.evaluate(X_scaled[test], Y[test], verbose=False)
train_r2 = r2_score(Y[train], model.predict(X_scaled[train]))
R2_train.append(train_r2)
score_r2 = r2_score(Y[test], model.predict(X_scaled[test]))
R2_scores.append(score_r2)
MAPE_train.append(history.history.get(
'mean_absolute_percentage_error')[-1])
MAPE_scores.append(scores[2])
RMSE_train.append(history.history.get('root_mean_squared_error')[-1])
RMSE_scores.append(scores[1])
MAE_train.append(history.history.get('loss')[-1])
MAE_scores.append(scores[0])
print(
f"------------------------------------------Fold {fold_num}------------------------------------------")
print(
f"\tTrain Loss: {history.history.get('loss')[-1]}\tScore Loss: {scores[0]}")
print(
f"\tTrain RMSE: {history.history.get('root_mean_squared_error')[-1]}\tScore RMSE: {scores[1]}")
print(
f"\tTrain MAPE: {history.history.get('mean_absolute_percentage_error')[-1]}\tScore MAPE: {scores[2]}")
if scores[2] == 100.0:
print(history.history.get('mean_absolute_percentage_error'))
print(f"\tTrain R2: {train_r2}\tScore R2: {score_r2}")
fold_num += 1
print(f"\n\nOverall MAE: {np.mean(MAE_scores)} +/- {np.std(MAE_scores)}")
print(f"Overall RMSE: {np.mean(RMSE_scores)} +/- {np.std(RMSE_scores)}")
print(f"Overall MAPE: {np.mean(MAPE_scores)} +/- {np.std(MAPE_scores)}")
print(f"Overall R2: {np.mean(R2_scores)*100} +/- {np.std(R2_scores)*100}\n\n")
print("Delimited table:")
print("MAPE\tRMSE\tMAE\tR2")
for i in range(0, 10):
print("%.2f/%.2f\t%.2f/%.2f\t%.2f/%.2f\t%.2f/%.2f" %
(MAPE_train[i], MAPE_scores[i], RMSE_train[i], RMSE_scores[i], MAE_train[i], MAE_scores[i], R2_train[i], R2_scores[i]))
print("%.2f/%.2f\t%.2f/%.2f\t%.2f/%.2f\t%.2f/%.2f\t<===Averages" % (np.mean(MAPE_train), np.mean(MAPE_scores),
np.mean(RMSE_train), np.mean(RMSE_scores), np.mean(MAE_train), np.mean(MAE_scores), np.mean(R2_train), np.mean(R2_scores)))
------------------------------------------Fold 1------------------------------------------ Train Loss: 148.8094482421875 Score Loss: 167.15838623046875 Train RMSE: 251.0841827392578 Score RMSE: 262.29779052734375 Train MAPE: 18.920379638671875 Score MAPE: 22.58194351196289 Train R2: 0.8689229038296423 Score R2: 0.8579127356731234 ------------------------------------------Fold 2------------------------------------------ Train Loss: 145.37159729003906 Score Loss: 165.29574584960938 Train RMSE: 243.8059844970703 Score RMSE: 281.7315979003906 Train MAPE: 18.7296142578125 Score MAPE: 21.685359954833984 Train R2: 0.8785562517072538 Score R2: 0.8327142889469984 ------------------------------------------Fold 3------------------------------------------ Train Loss: 146.9127960205078 Score Loss: 182.96728515625 Train RMSE: 247.04754638671875 Score RMSE: 317.23583984375 Train MAPE: 18.726205825805664 Score MAPE: 27.40291404724121 Train R2: 0.8733361221299534 Score R2: 0.7906092925421513 ------------------------------------------Fold 4------------------------------------------ Train Loss: 149.08355712890625 Score Loss: 158.7327423095703 Train RMSE: 250.00448608398438 Score RMSE: 248.38143920898438 Train MAPE: 19.322107315063477 Score MAPE: 20.96881866455078 Train R2: 0.882958304263169 Score R2: 0.8772739124332536 ------------------------------------------Fold 5------------------------------------------ Train Loss: 147.90145874023438 Score Loss: 167.8203125 Train RMSE: 243.8638153076172 Score RMSE: 285.9010314941406 Train MAPE: 19.14512062072754 Score MAPE: 28.2465877532959 Train R2: 0.886995216147858 Score R2: 0.8456339632373818 ------------------------------------------Fold 6------------------------------------------ Train Loss: 148.5126190185547 Score Loss: 155.857666015625 Train RMSE: 248.6488037109375 Score RMSE: 260.4398193359375 Train MAPE: 19.16924285888672 Score MAPE: 22.925308227539062 Train R2: 0.8710542656766282 Score R2: 0.859127297653023 
------------------------------------------Fold 7------------------------------------------ Train Loss: 146.9676513671875 Score Loss: 157.43374633789062 Train RMSE: 248.3674774169922 Score RMSE: 253.45803833007812 Train MAPE: 19.224424362182617 Score MAPE: 22.180830001831055 Train R2: 0.8814724465021461 Score R2: 0.8741816696761926 ------------------------------------------Fold 8------------------------------------------ Train Loss: 147.7354278564453 Score Loss: 176.24685668945312 Train RMSE: 246.31649780273438 Score RMSE: 296.7696228027344 Train MAPE: 18.853086471557617 Score MAPE: 22.290430068969727 Train R2: 0.8875841276424871 Score R2: 0.8034147395607609 ------------------------------------------Fold 9------------------------------------------ Train Loss: 145.80921936035156 Score Loss: 149.3913116455078 Train RMSE: 246.23568725585938 Score RMSE: 241.790771484375 Train MAPE: 19.0592041015625 Score MAPE: 18.13523292541504 Train R2: 0.8898044924757631 Score R2: 0.8840339855617291 ------------------------------------------Fold 10------------------------------------------ Train Loss: 146.8694610595703 Score Loss: 154.2509765625 Train RMSE: 248.35455322265625 Score RMSE: 260.00750732421875 Train MAPE: 19.25370216369629 Score MAPE: 22.30623435974121 Train R2: 0.873761213185837 Score R2: 0.8740281349643028 Overall MAE: 163.5155029296875 +/- 9.895755181963652 Overall RMSE: 270.8013458251953 +/- 22.625691472925382 Overall MAPE: 22.872365951538086 +/- 2.7947371117432027 Overall R2: 84.98930020248915 +/- 3.0373323920165096 Delimited table: MAPE RMSE MAE R2 18.92/22.58 251.08/262.30 148.81/167.16 0.87/0.86 18.73/21.69 243.81/281.73 145.37/165.30 0.88/0.83 18.73/27.40 247.05/317.24 146.91/182.97 0.87/0.79 19.32/20.97 250.00/248.38 149.08/158.73 0.88/0.88 19.15/28.25 243.86/285.90 147.90/167.82 0.89/0.85 19.17/22.93 248.65/260.44 148.51/155.86 0.87/0.86 19.22/22.18 248.37/253.46 146.97/157.43 0.88/0.87 18.85/22.29 246.32/296.77 147.74/176.25 0.89/0.80 19.06/18.14 246.24/241.79 
145.81/149.39 0.89/0.88 19.25/22.31 248.35/260.01 146.87/154.25 0.87/0.87 19.04/22.87 247.37/270.80 147.40/163.52 0.88/0.85 <===Averages Wall time: 23min 52s
# Rebuild the tuned CNN purely for visualization with plot_model.
cnn_model = Sequential([
    layers.Conv1D(128, 5, activation='linear',
                  input_shape=(X_scaled.shape[1], 1)),
    layers.Dropout(0.1),
    layers.MaxPooling1D(pool_size=2, strides=3, padding='valid'),
    # BUG FIX: Flatten was commented out, which would leave the Dense stack
    # operating on a 3-D tensor, so the plotted architecture would not match
    # the model actually trained and evaluated. Restored to mirror it exactly.
    layers.Flatten(),
    layers.Dense(714, activation='relu'),
    layers.Dense(357, activation='linear'),
    layers.Dense(178, activation='relu'),
    layers.Dense(89, activation='linear'),
    layers.Dense(1, activation='linear'),
])
plot_model(cnn_model, to_file='cnn_trials/cnn_model_plotted.png',
           show_shapes=True)